[ns_server:info,2014-08-19T15:37:25.054,nonode@nohost:<0.58.0>:ns_server:init_logging:248]Started & configured logging [ns_server:info,2014-08-19T15:37:25.057,nonode@nohost:<0.58.0>:ns_server:log_pending:30]Static config terms: [{error_logger_mf_dir,"/opt/couchbase/var/lib/couchbase/logs"}, {error_logger_mf_maxbytes,10485760}, {error_logger_mf_maxfiles,20}, {path_config_bindir,"/opt/couchbase/bin"}, {path_config_etcdir,"/opt/couchbase/etc/couchbase"}, {path_config_libdir,"/opt/couchbase/lib"}, {path_config_datadir,"/opt/couchbase/var/lib/couchbase"}, {path_config_tmpdir,"/opt/couchbase/var/lib/couchbase/tmp"}, {nodefile,"/opt/couchbase/var/lib/couchbase/couchbase-server.node"}, {loglevel_default,debug}, {loglevel_couchdb,info}, {loglevel_ns_server,debug}, {loglevel_error_logger,debug}, {loglevel_user,debug}, {loglevel_menelaus,debug}, {loglevel_ns_doctor,debug}, {loglevel_stats,debug}, {loglevel_rebalance,debug}, {loglevel_cluster,debug}, {loglevel_views,debug}, {loglevel_mapreduce_errors,debug}, {loglevel_xdcr,debug}] [ns_server:info,2014-08-19T15:37:25.200,nonode@nohost:<0.58.0>:ns_server:start:58]Locked myself into a memory successfully. [error_logger:info,2014-08-19T15:37:25.243,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,crypto_sup} started: [{pid,<0.168.0>}, {name,crypto_server}, {mfargs,{crypto_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.243,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: crypto started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.252,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: asn1 started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.256,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: public_key started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.263,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.175.0>}, {name,ftp_sup}, {mfargs,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.283,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_profile_sup} started: [{pid,<0.178.0>}, {name,httpc_manager}, {mfargs, {httpc_manager,start_link, [default,only_session_cookies,inets]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.283,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.177.0>}, {name,httpc_profile_sup}, {mfargs, {httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.285,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.179.0>}, {name,httpc_handler_sup}, {mfargs,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.285,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.176.0>}, {name,httpc_sup}, {mfargs, {httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.288,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.180.0>}, {name,httpd_sup}, {mfargs,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.291,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.181.0>}, {name,tftp_sup}, {mfargs,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.291,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: inets started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.291,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: oauth started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.299,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.187.0>}, {name,ssl_broker_sup}, {mfargs,{ssl_broker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.305,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.188.0>}, {name,ssl_manager}, {mfargs,{ssl_manager,start_link,[[]]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.307,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.189.0>}, {name,ssl_connection}, {mfargs,{ssl_connection_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.307,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ssl started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.458,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.196.0>}, {name,ssl_server}, {mfargs,{ssl_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, 
{child_type,worker}] [error_logger:info,2014-08-19T15:37:25.459,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,lhttpc_sup} started: [{pid,<0.194.0>}, {name,lhttpc_manager}, {mfargs, {lhttpc_manager,start_link, [[{name,lhttpc_manager}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.459,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: lhttpc started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.462,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: xmerl started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.471,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: compiler started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.475,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: syntax_tools started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.475,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mochiweb started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.477,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.480,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_set_view started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.482,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.484,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mapreduce started_at: nonode@nohost [error_logger:info,2014-08-19T15:37:25.516,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.205.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/default.d/capi.ini", "/opt/couchbase/etc/couchdb/default.d/geocouch.ini", "/opt/couchbase/etc/couchdb/local.ini"], <0.205.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.538,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.208.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
[error_logger:info,2014-08-19T15:37:25.538,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.209.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.540,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.210.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.541,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.211.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.549,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.212.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.550,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.213.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.550,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.214.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.551,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.215.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.553,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.216.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.555,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.217.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, 
[couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.555,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.218.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.556,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.219.0>}, {name,couch_spatial_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_spatial_index_barrier, "max_parallel_spatial_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.556,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.207.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.559,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.221.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.610,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.222.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.619,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.233.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.622,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.236.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.622,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.238.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:37:25.626,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.239.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.628,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.241.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.631,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.243.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.646,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.245.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.646,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.262.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.646,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.220.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.646,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.206.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.646,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.163.0>}, {name,cb_couch_sup}, {mfargs,{cb_couch_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:37:25.654,nonode@nohost:ns_server_cluster_sup<0.162.0>:log_os_info:start_link:25]OS type: {unix,linux} Version: {2,6,32} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:24:24] [rq:24] [async-threads:16] [kernel-poll:true]\n"}, 
{system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, {localtime,{{2014,8,19},{15,37,25}}}, {memory, [{total,561000192}, {processes,5723504}, {processes_used,5711640}, {system,555276688}, {atom,833185}, {atom_used,825581}, {binary,48936}, {code,7877161}, {ets,648976}]}, {loaded, [ns_info,log_os_info,couch_config_writer,cb_init_loggers, mochiweb_acceptor,inet_tcp,gen_tcp,mochiweb_socket, mochiweb_socket_server,mochilists,mochiweb_http,eval_bits, couch_httpd,couch_view,couch_set_view_ddoc_cache, couch_query_servers,couch_spatial,mapreduce, couch_set_view,snappy,couch_compress, couch_spatial_validation,couch_set_view_mapreduce,ejson, couch_doc,couch_db_update_notifier,couch_btree, couch_ref_counter,couch_uuids,couch_db_updater,couch_db, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,queue,couch_index_barrier, couch_event_sup,couch_log,couch_rep_sup,httpd_util, filelib,couch_file,couch_file_write_guard, couch_task_status,erl_ddll,couch_drv,couch_primary_sup, couch_server,string,re,file2,couch_util,couch_config, couch_server_sup,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, mlockall,calendar,ale_default_formatter,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views',timer,io_lib_fread, 'ale_logger-cluster','ale_logger-rebalance', 'ale_logger-stats','ale_logger-ns_doctor', 'ale_logger-menelaus','ale_logger-user', 'ale_logger-ns_server','ale_logger-couchdb',ns_log_sink, disk_log_sup,disk_log_server,disk_log_1,disk_log, ale_disk_sink,ns_server,cpu_sup,memsup,disksup,os_mon,io, release_handler,overload,alarm_handler,log_mf_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal,sets, ordsets,erl_lint,compile,dynamic_compile,ale_utils, io_lib_pretty,io_lib_format,io_lib,ale_codegen,dict,ale, ale_dynamic_sup,ale_sup,ale_app,ns_bootstrap,child_erlang, file_io_server,orddict,erl_eval,file,c,kernel_config, user_sup,supervisor_bridge,standard_error,unicode,binary, ets,gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {asn1,"The Erlang ASN1 compiler version 1.6.18","1.6.18"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8ca6d2a"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-a425d97-git"}, {compiler,"ERTS CXC 138 10","4.7.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache 
CouchDB","1.2.0a-a425d97-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-a425d97-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.5.1-1083-rel-enterprise"}, {mochiweb,"MochiMedia Web Server","2.4.2"}, {syntax_tools,"Syntax tools","1.6.7.1"}, {xmerl,"XML parser","1.2.10"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,147}, {node,nonode@nohost}, {nodes,[]}, {registered, [kernel_safe_sup,couch_db_update_notifier_sup, couch_auth_cache,couch_rep_sup,os_mon_sup,couch_view, cpu_sup,couch_server_sup,memsup,disksup, couch_query_servers,ns_server_cluster_sup, couch_task_status,couch_log,httpd_sup,couch_httpd, couch_drv,ssl_connection_sup,couch_file_write_guard, couch_set_view_ddoc_cache,cb_couch_sup,ssl_manager, error_logger,couch_index_merger_connection_pool, sasl_safe_sup,'sink-ns_log','sink-disk_stats',ale_sup, couch_spatial,standard_error,'sink-disk_xdcr_errors', 'sink-disk_xdcr','sink-disk_debug',standard_error_sup, ale_dynamic_sup,'sink-disk_couchdb', 'sink-disk_mapreduce_errors','sink-disk_views', ssl_broker_sup,'sink-disk_error',ssl_server,timer_server, ssl_sup,ale,httpc_sup,httpc_profile_sup,httpc_manager, httpc_handler_sup,erl_prim_loader,inet_db,ftp_sup, sasl_sup,couch_spatial_index_barrier,rex, couch_replica_index_barrier,kernel_sup, couch_main_index_barrier,global_name_server,inets_sup, lhttpc_sup,couch_replication,crypto_server,file_server_2, crypto_sup,global_group,couch_task_events, couch_secondary_services,couch_primary_services, release_handler,couch_db_update,init,overload, couch_config,alarm_handler,couch_set_view,disk_log_sup, disk_log_server,couch_server,code_server,couch_uuids, application_controller,lhttpc_manager,tftp_sup, 'sink-disk_default']}, {cookie,nocookie}, {wordsize,8}, {wall_clock,1}] [ns_server:info,2014-08-19T15:37:25.658,nonode@nohost:ns_server_cluster_sup<0.162.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "," "] [error_logger:info,2014-08-19T15:37:25.660,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.264.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:25.661,nonode@nohost:dist_manager<0.265.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip_start" [ns_server:info,2014-08-19T15:37:25.661,nonode@nohost:dist_manager<0.265.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip" [ns_server:info,2014-08-19T15:37:25.661,nonode@nohost:dist_manager<0.265.0>:dist_manager:init:159]ip config not found. 
Looks like we're brand new node [error_logger:info,2014-08-19T15:37:25.661,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inet_gethost_native_sup} started: [{pid,<0.267.0>},{mfa,{inet_gethost_native,init,[[]]}}] [error_logger:info,2014-08-19T15:37:25.662,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.266.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:25.794,nonode@nohost:dist_manager<0.265.0>:dist_manager:bringup:230]Attempting to bring up net_kernel with name 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:25.797,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.269.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.797,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.270.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:25.798,ns_1@127.0.0.1:dist_manager<0.265.0>:dist_manager:save_node:143]saving node to "/opt/couchbase/var/lib/couchbase/couchbase-server.node" [error_logger:info,2014-08-19T15:37:25.798,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.271.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.799,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.268.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:37:25.818,ns_1@127.0.0.1:dist_manager<0.265.0>:dist_manager:bringup:238]Attempted to save node name to disk: ok [error_logger:info,2014-08-19T15:37:25.818,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.265.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.819,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.274.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:37:25.822,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.275.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:25.823,ns_1@127.0.0.1:ns_config_sup<0.276.0>:ns_config_sup:init:32]loading static ns_config from "/opt/couchbase/etc/couchbase/config" [error_logger:info,2014-08-19T15:37:25.823,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.277.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.823,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.278.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:25.836,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:load_config:795]Loading static config from "/opt/couchbase/etc/couchbase/config" [ns_server:info,2014-08-19T15:37:25.837,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:load_config:809]Loading dynamic config from "/opt/couchbase/var/lib/couchbase/config/config.dat" [ns_server:info,2014-08-19T15:37:25.837,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:load_config:813]No dynamic config file found. 
Assuming we're brand new node [ns_server:debug,2014-08-19T15:37:25.838,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:load_config:816]Here's full dynamic config we loaded: [] [ns_server:info,2014-08-19T15:37:25.838,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:load_config:827]Here's full dynamic config we loaded + static & default config: [{replication_topology,star}, {drop_request_memory_threshold_mib,undefined}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {auto_failover_cfg,[{enabled,false},{timeout,120},{max_nodes,1},{count,0}]}, {replication,[{enabled,true}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {buckets,[{configs,[]}]}, {memory_quota,58026}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',isasl}, 
[{'_vclock',[{<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {remote_clusters,[]}, {rest_creds,[{creds,[]}]}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {rest,[{port,8091}]}, {{node,'ns_1@127.0.0.1',membership},active}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}] [ns_server:info,2014-08-19T15:37:25.840,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config_default:upgrade_config_from_1_7_to_1_7_1:342]Upgrading config from 1.7 to 1.7.1 [ns_server:debug,2014-08-19T15:37:25.840,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,1}}, {set,email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {set,auto_failover_cfg, [{enabled,false},{timeout,120},{max_nodes,1},{count,0}]}] [ns_server:info,2014-08-19T15:37:25.841,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config_default:upgrade_config_from_1_7_1_to_1_7_2:353]Upgrading config from 1.7.1 to 1.7.2 [ns_server:debug,2014-08-19T15:37:25.841,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,7,2}}] [ns_server:info,2014-08-19T15:37:25.841,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config_default:upgrade_config_from_1_7_2_to_1_8_0:407]Upgrading config from 1.7.2 to 1.8.0 [ns_server:debug,2014-08-19T15:37:25.842,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,8,0}}, {set,{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR", {"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so", "-X", 
{"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status, port_server_send_eol,stream]}]}] [ns_server:info,2014-08-19T15:37:25.843,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config_default:upgrade_config_from_1_8_0_to_1_8_1:444]Upgrading config from 1.8.0 to 1.8.1 [ns_server:debug,2014-08-19T15:37:25.844,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{1,8,1}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {set, {node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{<<"dfb2b8050a9477d75b4803bd9015782d">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {set, {node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {set, {node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{<<"dfb2b8050a9477d75b4803bd9015782d">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}] [ns_server:info,2014-08-19T15:37:25.845,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config_default:upgrade_config_from_1_8_1_to_2_0:473]Upgrading 
config from 1.8.1 to 2.0 [ns_server:debug,2014-08-19T15:37:25.846,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{2,0}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {set, {node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}] [ns_server:info,2014-08-19T15:37:25.846,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config_default:upgrade_config_from_2_0_to_2_2_0:542]Upgrading config from 2.0 to 2.2.0 [ns_server:debug,2014-08-19T15:37:25.847,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{2,2,0}}] [ns_server:info,2014-08-19T15:37:25.847,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config_default:upgrade_config_from_2_2_0_to_2_3_0:549]Upgrading config from 2.2.0 to 2.3.0 [ns_server:debug,2014-08-19T15:37:25.848,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,{node,'ns_1@127.0.0.1',config_version},{2,3,0}}, {set, {node,'ns_1@127.0.0.1',memcached}, [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, 
{log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}] [ns_server:debug,2014-08-19T15:37:25.848,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_init:626]Upgraded initial config: {config, {full,"/opt/couchbase/etc/couchbase/config",undefined,ns_config_default}, [[], [{directory,"/opt/couchbase/var/lib/couchbase/config"}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {{node,'ns_1@127.0.0.1',membership},active}, {rest,[{port,8091}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {rest_creds,[{creds,[]}]}, {remote_clusters,[]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock',[{<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock',[{<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {port,11210}, {mccouch_port,11213}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {verbosity,[]}]}, {memory_quota,58026}, {buckets,[{configs,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", 
{"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock',[{<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {email_alerts, [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {replication,[{enabled,true}]}, {auto_failover_cfg, [{enabled,false},{timeout,120},{max_nodes,1},{count,0}]}, {{request_limit,rest},undefined}, {{request_limit,capi},undefined}, {drop_request_memory_threshold_mib,undefined}, {replication_topology,star}]], [[{{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667445}}]}|{2,3,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {drop_request_memory_threshold_mib,undefined}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down, auto_failover_cluster_too_small,ip,disk,overhead, ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,58026}, {nodes_wanted,['ns_1@127.0.0.1']}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {replication_topology,star}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, 
[{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667445}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD", {"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout, stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}]], ns_config_default, {ns_config,save_config_sync,[]}, undefined,false} [error_logger:info,2014-08-19T15:37:25.851,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.279.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/opt/couchbase/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.852,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.282.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.853,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.283.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.285.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.276.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:25.887,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.287.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.892,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.289.0>}, {name,diag_handler_worker}, {mfa,{work_queue,start_link,[diag_handler_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:25.893,ns_1@127.0.0.1:ns_server_sup<0.288.0>:dir_size:start_link:47]Starting quick version of dir_size with program name: i386-linux-godu [error_logger:info,2014-08-19T15:37:25.894,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.290.0>}, {name,dir_size}, {mfa,{dir_size,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.895,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.291.0>}, {name,request_throttler}, {mfa,{request_throttler,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.898,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.293.0>}, {name,timer2_server}, {mfargs,{timer2,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:warn,2014-08-19T15:37:25.899,ns_1@127.0.0.1:ns_log<0.292.0>:ns_log:read_logs:123]Couldn't load logs from "/opt/couchbase/var/lib/couchbase/ns_log" (perhaps it's first startup): {error, enoent} [error_logger:info,2014-08-19T15:37:25.899,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.292.0>}, 
{name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.899,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.294.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:25.900,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.295.0>}, {name,ns_config_ets_dup}, {mfa,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:25.900,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T15:37:25.901,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:37:25.901,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:37:25.901,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:37:25.902,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T15:37:25.902,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T15:37:25.902,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T15:37:25.902,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_isasl_sync<0.298.0>:ns_config_isasl_sync:init:63]isasl_sync init: ["/opt/couchbase/var/lib/couchbase/isasl.pw","_admin", "051984933ac39a02e4056d80a45e8c36"] 
[ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_isasl_sync<0.298.0>:ns_config_isasl_sync:init:71]isasl_sync init buckets: [] [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T15:37:25.903,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_isasl_sync<0.298.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:37:25.905,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:37:25.906,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:37:25.906,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] 
[ns_server:debug,2014-08-19T15:37:25.906,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T15:37:25.906,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:warn,2014-08-19T15:37:25.909,ns_1@127.0.0.1:ns_config_isasl_sync<0.298.0>:ns_memcached:connect:1161]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. [ns_server:debug,2014-08-19T15:37:25.909,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:37:25.909,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:37:25.909,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T15:37:25.910,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:37:25.910,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:37:25.910,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [error_logger:info,2014-08-19T15:37:26.910,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.298.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:26.910,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.302.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:26.911,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.304.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:26.911,ns_1@127.0.0.1:ns_node_disco<0.305.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [ns_server:debug,2014-08-19T15:37:26.912,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [user:info,2014-08-19T15:37:26.912,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_init:86]Initial otp cookie generated: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T15:37:26.912,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T15:37:26.912,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:37:26.953,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:37:26.953,ns_1@127.0.0.1:<0.306.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T15:37:26.954,ns_1@127.0.0.1:<0.306.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T15:37:26.954,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.305.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:26.954,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.309.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:26.955,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.310.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:26.956,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:init:66]init pulling [error_logger:info,2014-08-19T15:37:26.956,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.311.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:26.956,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:init:68]init pushing [ns_server:debug,2014-08-19T15:37:26.957,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2014-08-19T15:37:26.957,ns_1@127.0.0.1:ns_config_events<0.277.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T15:37:26.957,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T15:37:26.957,ns_1@127.0.0.1:ns_config_events<0.277.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T15:37:26.957,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T15:37:26.957,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:37:26.957,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T15:37:26.958,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:37:26.958,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:37:26.958,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:37:26.958,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T15:37:26.958,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [error_logger:info,2014-08-19T15:37:26.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.312.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:26.958,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [error_logger:info,2014-08-19T15:37:26.958,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: 
[{pid,<0.303.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:37:26.959,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:37:26.958,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([alert_limits,auto_failover_cfg,autocompaction, buckets,drop_request_memory_threshold_mib, email_alerts,fast_warmup, index_aware_rebalance_disabled, max_bucket_count,memory_quota,nodes_wanted,otp, remote_clusters,replication, replication_topology,rest,rest_creds, set_view_update_daemon, {couchdb,max_parallel_indexers}, {couchdb,max_parallel_replica_indexers}, {request_limit,capi}, {request_limit,rest}, {node,'ns_1@127.0.0.1',capi_port}, {node,'ns_1@127.0.0.1',compaction_daemon}, {node,'ns_1@127.0.0.1',config_version}, {node,'ns_1@127.0.0.1',isasl}, {node,'ns_1@127.0.0.1',membership}, {node,'ns_1@127.0.0.1',memcached}, {node,'ns_1@127.0.0.1',moxi}, {node,'ns_1@127.0.0.1',ns_log}, {node,'ns_1@127.0.0.1',port_servers}, {node,'ns_1@127.0.0.1',rest}, {node,'ns_1@127.0.0.1',ssl_capi_port}, {node,'ns_1@127.0.0.1', ssl_proxy_downstream_port}, {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port}, {node,'ns_1@127.0.0.1',ssl_rest_port}]..) [ns_server:debug,2014-08-19T15:37:26.959,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:37:26.959,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:37:26.959,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T15:37:26.959,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:37:26.960,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [error_logger:info,2014-08-19T15:37:26.960,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.318.0>}, {name,vbucket_map_mirror}, {mfa,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:26.960,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T15:37:26.960,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T15:37:26.960,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T15:37:26.960,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T15:37:26.960,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T15:37:26.960,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: 
{couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T15:37:26.961,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T15:37:26.962,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", 
[log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:37:26.962,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:37:26.962,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T15:37:26.962,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:37:26.962,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:37:26.963,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T15:37:26.978,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:37:26.979,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T15:37:26.979,ns_1@127.0.0.1:<0.315.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T15:37:26.979,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [error_logger:info,2014-08-19T15:37:26.979,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.320.0>}, {name,bucket_info_cache}, {mfa,{bucket_info_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:26.979,ns_1@127.0.0.1:<0.315.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T15:37:26.979,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.323.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:26.979,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.324.0>}, {name,buckets_events}, {mfa,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[ns_server:debug,2014-08-19T15:37:26.980,ns_1@127.0.0.1:ns_log_events<0.302.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2014-08-19T15:37:26.980,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.326.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:26.980,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.325.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:26.981,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.327.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:27.012,ns_1@127.0.0.1:ns_cookie_manager<0.274.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [error_logger:info,2014-08-19T15:37:27.012,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.328.0>}, {name,samples_loader_tasks}, {mfa,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:27.012,ns_1@127.0.0.1:<0.316.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T15:37:27.012,ns_1@127.0.0.1:<0.316.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T15:37:27.014,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.329.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:27.016,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [error_logger:info,2014-08-19T15:37:27.018,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.333.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:27.024,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.336.0>}, {name,remote_clusters_info}, {mfa,{remote_clusters_info,start_link,[]}}, 
{restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:27.024,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.337.0>}, {name,master_activity_events}, {mfa, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:27.025,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2014-08-19T15:37:27.027,ns_1@127.0.0.1:ns_server_sup<0.288.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] [ns_server:debug,2014-08-19T15:37:27.027,ns_1@127.0.0.1:ns_server_sup<0.288.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [ns_server:debug,2014-08-19T15:37:27.027,ns_1@127.0.0.1:ns_server_sup<0.288.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [user:info,2014-08-19T15:37:27.029,ns_1@127.0.0.1:mb_master<0.340.0>:mb_master:init:86]I'm the only node, so I'm the master. [ns_server:debug,2014-08-19T15:37:27.034,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T15:37:27.035,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.331.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}]} [ns_server:info,2014-08-19T15:37:27.039,ns_1@127.0.0.1:ns_config<0.279.0>:ns_online_config_upgrader:upgrade_config_on_join_from_pre_2_0_to_2_0:70]Adding some 2.0 specific keys to the config [ns_server:debug,2014-08-19T15:37:27.039,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> undefined [ns_server:debug,2014-08-19T15:37:27.039,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) [ns_server:debug,2014-08-19T15:37:27.039,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,dynamic_config_version,[2,0]},{set,vbucket_map_history,[]}] [ns_server:debug,2014-08-19T15:37:27.041,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([cluster_compat_version]..) 
[ns_server:debug,2014-08-19T15:37:27.041,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T15:37:27.042,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T15:37:27.042,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:handle_call:119]Fully synchronized config in 10 us [user:warn,2014-08-19T15:37:27.042,ns_1@127.0.0.1:<0.347.0>:ns_orchestrator:consider_switching_compat_mode:1051]Changed cluster compat mode from undefined to [2,5] [ns_server:debug,2014-08-19T15:37:27.042,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> undefined [ns_server:info,2014-08-19T15:37:27.042,ns_1@127.0.0.1:ns_config<0.279.0>:ns_online_config_upgrader:upgrade_config_from_pre_2_0_to_2_0:74]Performing online config upgrade to 2.0 version [ns_server:debug,2014-08-19T15:37:27.042,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) [ns_server:debug,2014-08-19T15:37:27.042,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,dynamic_config_version,[2,0]}] [ns_server:info,2014-08-19T15:37:27.043,ns_1@127.0.0.1:ns_config<0.279.0>:ns_online_config_upgrader:upgrade_config_from_2_0_to_2_5:78]Performing online config upgrade to 2.5 version [ns_server:debug,2014-08-19T15:37:27.062,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:do_upgrade_config:577]Upgrading config by changes: [{set,dynamic_config_version,[2,5]}, {set,server_groups, [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}] [ns_server:debug,2014-08-19T15:37:27.062,ns_1@127.0.0.1:mb_master_sup<0.342.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.347.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:27.063,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.347.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:27.064,ns_1@127.0.0.1:mb_master_sup<0.342.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.356.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:27.064,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.356.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:27.066,ns_1@127.0.0.1:<0.357.0>:auto_failover:init:134]init auto_failover. 
[ns_server:debug,2014-08-19T15:37:27.066,ns_1@127.0.0.1:mb_master_sup<0.342.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.357.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:37:27.066,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.357.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:27.066,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.340.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:27.067,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.358.0>}, {name,master_activity_events_ingress}, {mfa, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:27.067,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.359.0>}, {name,master_activity_events_timestamper}, {mfa, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:27.089,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.360.0>}, {name,master_activity_events_pids_watcher}, {mfa, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:27.104,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.361.0>}, {name,master_activity_events_keeper}, {mfa,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:29.130,ns_1@127.0.0.1:ns_ssl_services_setup<0.365.0>:ns_server_cert:generate_cert_and_pkey:44]Generated certificate and private key in 2022329 us [ns_server:debug,2014-08-19T15:37:29.131,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLR"...>>, <<"*****">>} [ns_server:debug,2014-08-19T15:37:29.131,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([cert_and_pkey]..) 
[error_logger:info,2014-08-19T15:37:29.165,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.365.0>}, {name,ns_ssl_services_setup}, {mfargs,{ns_ssl_services_setup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:29.166,ns_1@127.0.0.1:ns_ssl_services_setup<0.365.0>:ns_ssl_services_setup:restart_xdcr_proxy:201]Xdcr proxy restart failed. But that's usually normal. {'EXIT', {{badmatch, {badrpc, {'EXIT', {{case_clause, false}, [{ns_child_ports_sup, restart_port_by_name, 1}, {rpc, '-handle_call_call/6-fun-0-', 5}]}}}}, [{ns_ports_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, init,1}, {gen_server,init_it, 6}, {proc_lib, init_p_do_apply, 3}]}} [error_logger:info,2014-08-19T15:37:29.192,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.373.0>}, {name,ns_rest_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_rest_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.193,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.390.0>}, {name,ns_capi_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_capi_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.194,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.364.0>}, {name,ns_ssl_services_sup}, {mfargs,{ns_ssl_services_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:29.194,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.407.0>}, {name,menelaus_ui_auth}, {mfargs,{menelaus_ui_auth,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.195,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.408.0>}, {name,menelaus_web_cache}, {mfargs,{menelaus_web_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.196,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.409.0>}, {name,menelaus_stats_gatherer}, {mfargs,{menelaus_stats_gatherer,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.198,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.410.0>}, {name,menelaus_web}, 
{mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.199,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.427.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.200,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.428.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.202,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.429.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2014-08-19T15:37:29.202,ns_1@127.0.0.1:ns_server_sup<0.288.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. [error_logger:info,2014-08-19T15:37:29.202,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.363.0>}, {name,menelaus}, {mfa,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:29.203,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.431.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.204,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.432.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:37:29.204,ns_1@127.0.0.1:<0.433.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2014-08-19T15:37:29.205,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.433.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.205,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.430.0>}, {name,mc_sup}, {mfa,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:29.205,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.434.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.205,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.435.0>}, {name,ns_port_memcached_killer}, {mfa,{ns_ports_setup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:29.206,ns_1@127.0.0.1:<0.437.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/opt/couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2014-08-19T15:37:29.206,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.437.0>}, {name,ns_memcached_log_rotator}, {mfa,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.212,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.439.0>}, {name,memcached_clients_pool}, {mfa,{memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.214,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.440.0>}, {name,proxied_memcached_clients_pool}, {mfa,{proxied_memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.214,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.441.0>}, {name,xdc_lhttpc_pool}, {mfa, {lhttpc_manager,start_link, [[{name,xdc_lhttpc_pool}, {connection_timeout,120000}, {pool_size,200}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.214,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.442.0>}, {name,ns_null_connection_pool}, {mfa, {ns_null_connection_pool,start_link, [ns_null_connection_pool]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.215,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.443.0>}, {name,xdc_replication_sup}, {mfa,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:29.248,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.444.0>}, 
{name,xdc_rep_manager}, {mfa,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.250,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.453.0>}, {name,ns_memcached_sockets_pool}, {mfa,{ns_memcached_sockets_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.255,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.456.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.256,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.458.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.256,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.457.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:29.256,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.455.0>}, {name,ns_bucket_worker_sup}, {mfa,{ns_bucket_worker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:29.256,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.459.0>}, {name,system_stats_collector}, {mfa,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.258,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.462.0>}, {name,{stats_archiver,"@system"}}, {mfa,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:37:29.258,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.464.0>}, {name,{stats_reader,"@system"}}, {mfa,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:29.262,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. 
[error_logger:info,2014-08-19T15:37:29.262,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.465.0>}, {name,compaction_daemon}, {mfa,{compaction_daemon,start_link,[]}}, {restart_type,{permanent,4}}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:29.262,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:37:29.264,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.467.0>:xdc_rdoc_replication_srv:init:76]Loaded the following docs: [] [ns_server:debug,2014-08-19T15:37:29.264,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.467.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T15:37:29.264,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.467.0>}, {name,xdc_rdoc_replication_srv}, {mfa,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:37:29.265,ns_1@127.0.0.1:set_view_update_daemon<0.469.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2014-08-19T15:37:29.265,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.469.0>}, {name,set_view_update_daemon}, {mfa,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:37:29.265,ns_1@127.0.0.1:<0.2.0>:child_erlang:child_loop:104]Entered child_loop [error_logger:info,2014-08-19T15:37:29.265,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.288.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:37:29.266,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ns_server started_at: 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T15:37:59.263,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:37:59.263,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:38:00.086,ns_1@127.0.0.1:ns_config_log<0.283.0>:ns_config_log:log_common:138]config change: uuid -> <<"9032e293d656a8b04683554c561fe06f">> [ns_server:debug,2014-08-19T15:38:00.086,ns_1@127.0.0.1:ns_config_rep<0.312.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([uuid]..) [ns_server:debug,2014-08-19T15:38:29.264,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. 
[ns_server:debug,2014-08-19T15:38:29.264,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:38:59.265,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:38:59.265,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:39:29.266,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:39:29.266,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:39:59.267,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:39:59.267,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:40:29.268,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:40:29.268,ns_1@127.0.0.1:compaction_daemon<0.465.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [user:info,2014-08-19T15:40:48.627,ns_1@127.0.0.1:<0.294.0>:ns_log:crash_consumption_loop:64]Port server memcached on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: EOL on stdin. Initiating shutdown [user:info,2014-08-19T15:40:48.628,ns_1@127.0.0.1:<0.294.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: EOL on stdin. 
Exiting [ns_server:debug,2014-08-19T15:40:48.628,ns_1@127.0.0.1:<0.436.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.434.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.628,ns_1@127.0.0.1:<0.2.0>:child_erlang:child_loop:108]Got EOL [ns_server:info,2014-08-19T15:40:48.628,ns_1@127.0.0.1:<0.2.0>:ns_bootstrap:stop:41]Initiated server shutdown [error_logger:info,2014-08-19T15:40:48.628,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.1032.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:48.628,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]Initiated server shutdown [ns_server:debug,2014-08-19T15:40:48.629,ns_1@127.0.0.1:<0.470.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.469.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.629,ns_1@127.0.0.1:<0.466.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.465.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.800,ns_1@127.0.0.1:<0.463.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_stats_event,<0.462.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.800,ns_1@127.0.0.1:<0.461.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_tick_event,<0.459.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.800,ns_1@127.0.0.1:<0.458.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.457.0>} exited with reason shutdown [error_logger:error,2014-08-19T15:40:48.801,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_bucket_sup} Context: shutdown_error Reason: normal Offender: [{pid,<0.458.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:48.801,ns_1@127.0.0.1:<0.1033.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.1032.0>} exited with reason killed [ns_server:debug,2014-08-19T15:40:48.801,ns_1@127.0.0.1:<0.438.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.435.0>} exited with reason killed [ns_server:debug,2014-08-19T15:40:48.801,ns_1@127.0.0.1:<0.366.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.365.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.801,ns_1@127.0.0.1:<0.362.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {master_activity_events,<0.361.0>} exited with reason killed [ns_server:info,2014-08-19T15:40:48.802,ns_1@127.0.0.1:mb_master<0.340.0>:mb_master:terminate:299]Synchronously shutting down child mb_master_sup [ns_server:debug,2014-08-19T15:40:48.802,ns_1@127.0.0.1:<0.341.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.340.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.802,ns_1@127.0.0.1:<0.334.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.333.0>} exited with reason 
shutdown [ns_server:debug,2014-08-19T15:40:48.802,ns_1@127.0.0.1:<0.330.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {buckets_events,<0.329.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.802,ns_1@127.0.0.1:<0.322.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.320.0>} exited with reason killed [ns_server:debug,2014-08-19T15:40:48.802,ns_1@127.0.0.1:<0.319.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.318.0>} exited with reason killed [ns_server:debug,2014-08-19T15:40:48.802,ns_1@127.0.0.1:<0.313.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events_local,<0.312.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.802,ns_1@127.0.0.1:<0.299.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.298.0>} exited with reason shutdown [ns_server:debug,2014-08-19T15:40:48.803,ns_1@127.0.0.1:<0.296.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.295.0>} exited with reason killed [error_logger:error,2014-08-19T15:40:48.804,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: gen_event:init_it/6 pid: <0.321.0> registered_name: bucket_info_cache_invalidations exception exit: killed in function gen_event:terminate_server/4 ancestors: [bucket_info_cache,ns_server_sup,ns_server_cluster_sup, <0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 233 stack_size: 24 reductions: 119 neighbours: [ns_server:debug,2014-08-19T15:40:48.906,ns_1@127.0.0.1:<0.286.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.285.0>} exited with reason shutdown [error_logger:error,2014-08-19T15:40:48.906,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.287.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:48.906,ns_1@127.0.0.1:ns_config<0.279.0>:ns_config:wait_saver:652]Done waiting for saver. 
[ns_server:debug,2014-08-19T15:40:48.906,ns_1@127.0.0.1:<0.284.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.283.0>} exited with reason shutdown [error_logger:error,2014-08-19T15:40:48.907,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.449.0> terminating ** Last message in was {'EXIT',<0.212.0>,killed} ** When Server state == {db,<0.449.0>,<0.450.0>,nil,<<"1408448249248355">>, <0.445.0>,<0.451.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.445.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.445.0>,nil, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.445.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 0,<<"_replicator">>, "/opt/couchbase/var/lib/couchbase/data/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create,sys_db, {user_ctx, {user_ctx,null, [<<"_admin">>,<<"_replicator">>], undefined}}]} ** Reason for termination == ** killed [error_logger:error,2014-08-19T15:40:48.909,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.449.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 241 neighbours: [error_logger:error,2014-08-19T15:40:48.909,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.227.0> terminating ** Last message in was {'EXIT',<0.212.0>,killed} ** When Server state == {db,<0.227.0>,<0.228.0>,nil,<<"1408448245593037">>, <0.223.0>,<0.229.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.223.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.223.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.223.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/opt/couchbase/var/lib/couchbase/data/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], [create, {user_ctx, {user_ctx,null,[<<"_admin">>],undefined}}, sys_db]} ** Reason for termination == ** killed [error_logger:error,2014-08-19T15:40:48.910,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.227.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 1597 stack_size: 24 reductions: 289 neighbours: [error_logger:info,2014-08-19T15:40:48.915,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: ns_server exited: stopped type: permanent [ns_server:info,2014-08-19T15:40:53.662,nonode@nohost:<0.58.0>:ns_server:init_logging:248]Started & configured logging 
[ns_server:info,2014-08-19T15:40:53.665,nonode@nohost:<0.58.0>:ns_server:log_pending:30]Static config terms: [{error_logger_mf_dir,"/opt/couchbase/var/lib/couchbase/logs"}, {error_logger_mf_maxbytes,10485760}, {error_logger_mf_maxfiles,20}, {path_config_bindir,"/opt/couchbase/bin"}, {path_config_etcdir,"/opt/couchbase/etc/couchbase"}, {path_config_libdir,"/opt/couchbase/lib"}, {path_config_datadir,"/opt/couchbase/var/lib/couchbase"}, {path_config_tmpdir,"/opt/couchbase/var/lib/couchbase/tmp"}, {nodefile,"/opt/couchbase/var/lib/couchbase/couchbase-server.node"}, {loglevel_default,debug}, {loglevel_couchdb,info}, {loglevel_ns_server,debug}, {loglevel_error_logger,debug}, {loglevel_user,debug}, {loglevel_menelaus,debug}, {loglevel_ns_doctor,debug}, {loglevel_stats,debug}, {loglevel_rebalance,debug}, {loglevel_cluster,debug}, {loglevel_views,debug}, {loglevel_mapreduce_errors,debug}, {loglevel_xdcr,debug}] [ns_server:info,2014-08-19T15:40:53.819,nonode@nohost:<0.58.0>:ns_server:start:58]Locked myself into a memory successfully. [error_logger:info,2014-08-19T15:40:53.856,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,crypto_sup} started: [{pid,<0.167.0>}, {name,crypto_server}, {mfargs,{crypto_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:53.856,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: crypto started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:53.865,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: asn1 started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:53.870,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: public_key started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:53.878,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.174.0>}, {name,ftp_sup}, {mfargs,{ftp_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.900,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_profile_sup} started: [{pid,<0.177.0>}, {name,httpc_manager}, {mfargs, {httpc_manager,start_link, [default,only_session_cookies,inets]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:53.900,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.176.0>}, {name,httpc_profile_sup}, {mfargs, {httpc_profile_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.903,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,httpc_sup} started: [{pid,<0.178.0>}, 
{name,httpc_handler_sup}, {mfargs,{httpc_handler_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.903,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.175.0>}, {name,httpc_sup}, {mfargs, {httpc_sup,start_link, [[{httpc,{default,only_session_cookies}}]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.906,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.179.0>}, {name,httpd_sup}, {mfargs,{httpd_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.909,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inets_sup} started: [{pid,<0.180.0>}, {name,tftp_sup}, {mfargs,{tftp_sup,start_link,[[]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.909,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: inets started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:53.909,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: oauth started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:53.918,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.186.0>}, {name,ssl_broker_sup}, {mfargs,{ssl_broker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.925,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.187.0>}, {name,ssl_manager}, {mfargs,{ssl_manager,start_link,[[]]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:53.927,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.188.0>}, {name,ssl_connection}, {mfargs,{ssl_connection_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,4000}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:53.927,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ssl started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.086,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ssl_sup} started: [{pid,<0.195.0>}, {name,ssl_server}, {mfargs,{ssl_server,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:40:54.086,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,lhttpc_sup} started: [{pid,<0.193.0>}, {name,lhttpc_manager}, {mfargs, {lhttpc_manager,start_link, [[{name,lhttpc_manager}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.087,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: lhttpc started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.090,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: xmerl started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.101,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: compiler started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.105,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: syntax_tools started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.105,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mochiweb started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.108,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.111,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_set_view started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.114,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.116,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mapreduce started_at: nonode@nohost [error_logger:info,2014-08-19T15:40:54.148,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.204.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/default.d/capi.ini", "/opt/couchbase/etc/couchdb/default.d/geocouch.ini", "/opt/couchbase/etc/couchdb/local.ini"], <0.204.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.174,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.207.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] 
[error_logger:info,2014-08-19T15:40:54.174,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.208.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.176,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.209.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.178,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.210.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.200,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.211.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.201,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.224.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.201,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.225.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.202,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.226.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:54.204,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.227.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.207,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.228.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, 
[couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.207,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.229.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.208,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.230.0>}, {name,couch_spatial_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_spatial_index_barrier, "max_parallel_spatial_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.208,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.206.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:54.211,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.232.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:54.218,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.233.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.228,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.236.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.231,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.239.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.231,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.241.0>}, {name,index_merger_pool}, {mfargs, {lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:40:54.234,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.242.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.236,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.244.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.239,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.246.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.252,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.248.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.254,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.265.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.254,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.231.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:54.254,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.205.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:54.255,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.162.0>}, {name,cb_couch_sup}, {mfargs,{cb_couch_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:40:54.262,nonode@nohost:ns_server_cluster_sup<0.161.0>:log_os_info:start_link:25]OS type: {unix,linux} Version: {2,6,32} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:24:24] [rq:24] [async-threads:16] [kernel-poll:true]\n"}, 
{system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, {localtime,{{2014,8,19},{15,40,54}}}, {memory, [{total,560846912}, {processes,5642664}, {processes_used,5634008}, {system,555204248}, {atom,830761}, {atom_used,821985}, {binary,53176}, {code,7808288}, {ets,644880}]}, {loaded, [ns_info,log_os_info,couch_config_writer,cb_init_loggers, couch_uuids,mochiweb_acceptor,inet_tcp,gen_tcp, mochiweb_socket,mochiweb_socket_server,mochilists, mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view, couch_db_update_notifier,snappy,couch_compress, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,queue,couch_index_barrier, couch_event_sup,couch_log,couch_rep_sup,couch_btree, couch_ref_counter,couch_db_updater,couch_db,httpd_util, filelib,couch_file,couch_file_write_guard, couch_task_status,erl_ddll,couch_drv,couch_primary_sup, couch_server,string,re,file2,couch_util,couch_config, couch_server_sup,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, mlockall,calendar,ale_default_formatter,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views',timer,io_lib_fread, 'ale_logger-cluster','ale_logger-rebalance', 'ale_logger-stats','ale_logger-ns_doctor', 'ale_logger-menelaus','ale_logger-user', 'ale_logger-ns_server','ale_logger-couchdb',ns_log_sink, disk_log_sup,disk_log_server,disk_log_1,disk_log, ale_disk_sink,ns_server,cpu_sup,memsup,disksup,os_mon,io, release_handler,overload,alarm_handler,log_mf_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal,sets, ordsets,erl_lint,compile,dynamic_compile,ale_utils, io_lib_pretty,io_lib_format,io_lib,ale_codegen,dict,ale, ale_dynamic_sup,ale_sup,ale_app,ns_bootstrap,child_erlang, file_io_server,orddict,erl_eval,file,c,kernel_config, user_sup,supervisor_bridge,standard_error,unicode,binary, ets,gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {asn1,"The Erlang ASN1 compiler version 1.6.18","1.6.18"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8ca6d2a"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-a425d97-git"}, {compiler,"ERTS CXC 138 10","4.7.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-a425d97-git"}, {mapreduce,"MapReduce using V8 JavaScript 
engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-a425d97-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.5.1-1083-rel-enterprise"}, {mochiweb,"MochiMedia Web Server","2.4.2"}, {syntax_tools,"Syntax tools","1.6.7.1"}, {xmerl,"XML parser","1.2.10"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,152}, {node,nonode@nohost}, {nodes,[]}, {registered, [ssl_sup,couch_file_write_guard,global_group, lhttpc_manager,tftp_sup,ale_sup,lhttpc_sup,httpc_sup, disk_log_sup,ale_dynamic_sup,disk_log_server, erl_prim_loader,httpc_profile_sup,os_mon_sup, httpc_manager,code_server,ns_server_cluster_sup, httpc_handler_sup,sasl_sup,'sink-ns_log',cpu_sup, 'sink-disk_stats',ftp_sup,couch_db_update_notifier_sup, memsup,application_controller,'sink-disk_xdcr_errors', disksup,ale,'sink-disk_xdcr',error_logger, standard_error_sup,standard_error,'sink-disk_debug', couch_log,'sink-disk_couchdb', 'sink-disk_mapreduce_errors',couch_auth_cache, 'sink-disk_views',inets_sup,couch_rep_sup, 'sink-disk_error',crypto_server,timer_server,crypto_sup, couch_view,cb_couch_sup,ssl_connection_sup, release_handler,couch_server_sup,couch_secondary_services, ssl_manager,couch_primary_services,overload, couch_db_update,couch_spatial_index_barrier, couch_replica_index_barrier,couch_query_servers, alarm_handler,httpd_sup,couch_set_view, couch_set_view_ddoc_cache,kernel_safe_sup,couch_config, couch_main_index_barrier,rex,inet_db,couch_task_status, couch_replication,couch_index_merger_connection_pool, 'sink-disk_default',kernel_sup,global_name_server, couch_spatial,ssl_broker_sup,couch_task_events, couch_server,couch_httpd,file_server_2,init,sasl_safe_sup, ssl_server,couch_drv,couch_uuids]}, {cookie,nocookie}, {wordsize,8}, {wall_clock,1}] [ns_server:info,2014-08-19T15:40:54.266,nonode@nohost:ns_server_cluster_sup<0.161.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "," "] [error_logger:info,2014-08-19T15:40:54.268,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.267.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:54.269,nonode@nohost:dist_manager<0.268.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip_start" [ns_server:info,2014-08-19T15:40:54.269,nonode@nohost:dist_manager<0.268.0>:dist_manager:read_address_config_from_path:83]Reading ip config from "/opt/couchbase/var/lib/couchbase/ip" [ns_server:info,2014-08-19T15:40:54.269,nonode@nohost:dist_manager<0.268.0>:dist_manager:init:159]ip config not found. 
Looks like we're brand new node [error_logger:info,2014-08-19T15:40:54.270,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,inet_gethost_native_sup} started: [{pid,<0.270.0>},{mfa,{inet_gethost_native,init,[[]]}}] [error_logger:info,2014-08-19T15:40:54.270,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.269.0>}, {name,inet_gethost_native_sup}, {mfargs,{inet_gethost_native,start_link,[]}}, {restart_type,temporary}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:54.400,nonode@nohost:dist_manager<0.268.0>:dist_manager:bringup:230]Attempting to bring up net_kernel with name 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:40:54.403,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.272.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.403,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.273.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:54.404,ns_1@127.0.0.1:dist_manager<0.268.0>:dist_manager:save_node:143]saving node to "/opt/couchbase/var/lib/couchbase/couchbase-server.node" [error_logger:info,2014-08-19T15:40:54.404,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.274.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.404,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.271.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@127.0.0.1',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:40:54.428,ns_1@127.0.0.1:dist_manager<0.268.0>:dist_manager:bringup:238]Attempted to save node name to disk: ok [error_logger:info,2014-08-19T15:40:54.429,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.268.0>}, {name,dist_manager}, {mfargs,{dist_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.430,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.277.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:40:54.432,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.278.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:54.433,ns_1@127.0.0.1:ns_config_sup<0.279.0>:ns_config_sup:init:32]loading static ns_config from "/opt/couchbase/etc/couchbase/config" [error_logger:info,2014-08-19T15:40:54.433,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.280.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.281.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:54.447,ns_1@127.0.0.1:ns_config<0.282.0>:ns_config:load_config:795]Loading static config from "/opt/couchbase/etc/couchbase/config" [ns_server:info,2014-08-19T15:40:54.448,ns_1@127.0.0.1:ns_config<0.282.0>:ns_config:load_config:809]Loading dynamic config from "/opt/couchbase/var/lib/couchbase/config/config.dat" [ns_server:debug,2014-08-19T15:40:54.449,ns_1@127.0.0.1:ns_config<0.282.0>:ns_config:load_config:816]Here's full dynamic config we loaded: [[{uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667480}}]}| <<"9032e293d656a8b04683554c561fe06f">>]}, {cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667449}}]}| {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLRqVU6TF0w4j56YEsiio7JKXdj2\nxytV+84pRrHIxtwy5wbd7M8d61WQqjR5dKc8onmDdDg57RBIgnZze7tOk53eX3Cn\n5/34jyitKxDkVEjMzHsSiBdZjTDegLqmKpxOAiokPEsHxn4XenZyacOM/gcJ6/j+\nfQIDAQABowIwADALBgkqhkiG9w0BAQUDggEBAJDQBZGy6r2NS8CERBUZ5W7ks0uh\npNwXgErwxU3srGRDoLKyN4d2VFo/xoQfmRFryX1M+aVwtfzbnTr0Z6AlRV19I2Zx\n8OCR6Zmk0gHdHbCAoDO8Yu78lbl+yv13SrjzrUpNXG67Zx6thc8Ea7KvCezSSsVM\ngJuJE2Chr0sADmqCfWPeJq/mujZKqDKKl06Myr++6XQ0oOekSnPfyCREcSSoHe9r\nX+ORwhpHb31vtNZc7cRRmfqBUKDmp8QDNj+539k03qyjxVhCGJQcOsPCFjWQBMJD\noyh2dK6J6E43Or43v2sEnMBtMcLl0UEymNnyxDCL2+tjlDn92nMM2fngk4Q=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667447}}]},2,5]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]},2,5]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667446}}]}, {cookie,xyzevwdfypcplvpp}]}, {{node,'ns_1@127.0.0.1',config_version}, 
[{'_vclock',[{'ns_1@127.0.0.1',{7,63575667445}}]}|{2,3,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {drop_request_memory_threshold_mib,undefined}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,58026}, {nodes_wanted,['ns_1@127.0.0.1']}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {replication_topology,star}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667445}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, 
{"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}]] [ns_server:info,2014-08-19T15:40:54.451,ns_1@127.0.0.1:ns_config<0.282.0>:ns_config:load_config:827]Here's full dynamic config we loaded + static & default config: [{{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667445}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {mccouch_port,11213}, {engines, [{membase, 
[{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{request_limit,rest},undefined}, {{request_limit,capi},undefined}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication_topology,star}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,58026}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {drop_request_memory_threshold_mib,undefined}, {buckets,[{configs,[]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667445}}]}|{2,3,0}]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667446}}]}, {cookie,xyzevwdfypcplvpp}]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]},2,5]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667447}}]},2,5]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667449}}]}| {<<"-----BEGIN 
CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLRqVU6TF0w4j56YEsiio7JKXdj2\nxytV+84pRrHIxtwy5wbd7M8d61WQqjR5dKc8onmDdDg57RBIgnZze7tOk53eX3Cn\n5/34jyitKxDkVEjMzHsSiBdZjTDegLqmKpxOAiokPEsHxn4XenZyacOM/gcJ6/j+\nfQIDAQABowIwADALBgkqhkiG9w0BAQUDggEBAJDQBZGy6r2NS8CERBUZ5W7ks0uh\npNwXgErwxU3srGRDoLKyN4d2VFo/xoQfmRFryX1M+aVwtfzbnTr0Z6AlRV19I2Zx\n8OCR6Zmk0gHdHbCAoDO8Yu78lbl+yv13SrjzrUpNXG67Zx6thc8Ea7KvCezSSsVM\ngJuJE2Chr0sADmqCfWPeJq/mujZKqDKKl06Myr++6XQ0oOekSnPfyCREcSSoHe9r\nX+ORwhpHb31vtNZc7cRRmfqBUKDmp8QDNj+539k03qyjxVhCGJQcOsPCFjWQBMJD\noyh2dK6J6E43Or43v2sEnMBtMcLl0UEymNnyxDCL2+tjlDn92nMM2fngk4Q=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667480}}]}| <<"9032e293d656a8b04683554c561fe06f">>]}] [error_logger:info,2014-08-19T15:40:54.453,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.282.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/opt/couchbase/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.454,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.284.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.455,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.285.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.456,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.287.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.457,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.279.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:54.458,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.289.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:40:54.463,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.291.0>}, {name,diag_handler_worker}, {mfa,{work_queue,start_link,[diag_handler_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:54.464,ns_1@127.0.0.1:ns_server_sup<0.290.0>:dir_size:start_link:47]Starting quick version of dir_size with program name: i386-linux-godu [error_logger:info,2014-08-19T15:40:54.465,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.292.0>}, {name,dir_size}, {mfa,{dir_size,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.466,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.293.0>}, {name,request_throttler}, {mfa,{request_throttler,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.469,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_safe_sup} started: [{pid,<0.295.0>}, {name,timer2_server}, {mfargs,{timer2,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.470,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.294.0>}, {name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.470,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.296.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:54.471,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.297.0>}, {name,ns_config_ets_dup}, {mfa,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:54.471,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:40:54.471,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:40:54.471,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:40:54.472,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: buckets -> 
[{configs,[]}] [ns_server:debug,2014-08-19T15:40:54.472,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLR"...>>, <<"*****">>} [ns_server:debug,2014-08-19T15:40:54.472,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T15:40:54.472,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T15:40:54.472,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T15:40:54.473,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T15:40:54.473,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:40:54.473,ns_1@127.0.0.1:ns_config_isasl_sync<0.300.0>:ns_config_isasl_sync:init:63]isasl_sync init: ["/opt/couchbase/var/lib/couchbase/isasl.pw","_admin", "051984933ac39a02e4056d80a45e8c36"] [ns_server:debug,2014-08-19T15:40:54.474,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:40:54.474,ns_1@127.0.0.1:ns_config_isasl_sync<0.300.0>:ns_config_isasl_sync:init:71]isasl_sync init buckets: [] [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_isasl_sync<0.300.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: replication_topology -> star 
[ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: uuid -> <<"9032e293d656a8b04683554c561fe06f">> [ns_server:debug,2014-08-19T15:40:54.475,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T15:40:54.476,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:40:54.476,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:40:54.478,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:warn,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_isasl_sync<0.300.0>:ns_memcached:connect:1161]Unable to connect: {error,{badmatch,{error,econnrefused}}}, retrying. 
[ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T15:40:54.479,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T15:40:54.480,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T15:40:54.480,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:40:54.480,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:40:54.480,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 
[ns_server:debug,2014-08-19T15:40:54.480,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:40:54.480,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:40:54.481,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [error_logger:info,2014-08-19T15:40:55.480,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.300.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.480,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.304.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.481,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.306.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.482,ns_1@127.0.0.1:ns_node_disco<0.307.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [ns_server:debug,2014-08-19T15:40:55.482,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [user:info,2014-08-19T15:40:55.482,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_sync:130]Node 'ns_1@127.0.0.1' synchronized otp cookie xyzevwdfypcplvpp from cluster [ns_server:debug,2014-08-19T15:40:55.483,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:40:55.512,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:40:55.513,ns_1@127.0.0.1:<0.308.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T15:40:55.513,ns_1@127.0.0.1:<0.308.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T15:40:55.514,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.307.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.514,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.310.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.515,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.311.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.516,ns_1@127.0.0.1:ns_config_rep<0.313.0>:ns_config_rep:init:66]init pulling [error_logger:info,2014-08-19T15:40:55.516,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.312.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.516,ns_1@127.0.0.1:ns_config_rep<0.313.0>:ns_config_rep:init:68]init pushing [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_config_rep<0.313.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_config_events<0.280.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_config_events<0.280.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T15:40:55.519,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T15:40:55.520,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLR"...>>, <<"*****">>} [ns_server:debug,2014-08-19T15:40:55.520,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> 
[2,5] [error_logger:info,2014-08-19T15:40:55.520,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.313.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.520,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T15:40:55.520,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [error_logger:info,2014-08-19T15:40:55.520,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.305.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T15:40:55.520,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T15:40:55.520,ns_1@127.0.0.1:ns_config_rep<0.313.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([alert_limits,auto_failover_cfg,autocompaction, buckets,cert_and_pkey,cluster_compat_version, drop_request_memory_threshold_mib, dynamic_config_version,email_alerts, fast_warmup,index_aware_rebalance_disabled, max_bucket_count,memory_quota,nodes_wanted,otp, remote_clusters,replication, replication_topology,rest,rest_creds, server_groups,set_view_update_daemon,uuid, vbucket_map_history, {couchdb,max_parallel_indexers}, {couchdb,max_parallel_replica_indexers}, {request_limit,capi}, {request_limit,rest}, {node,'ns_1@127.0.0.1',capi_port}, {node,'ns_1@127.0.0.1',compaction_daemon}, {node,'ns_1@127.0.0.1',config_version}, {node,'ns_1@127.0.0.1',isasl}, {node,'ns_1@127.0.0.1',membership}, {node,'ns_1@127.0.0.1',memcached}, {node,'ns_1@127.0.0.1',moxi}, {node,'ns_1@127.0.0.1',ns_log}, {node,'ns_1@127.0.0.1',port_servers}, {node,'ns_1@127.0.0.1',rest}, {node,'ns_1@127.0.0.1',ssl_capi_port}, {node,'ns_1@127.0.0.1', ssl_proxy_downstream_port}, {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port}, {node,'ns_1@127.0.0.1',ssl_rest_port}]..) 
[ns_server:debug,2014-08-19T15:40:55.520,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T15:40:55.521,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T15:40:55.521,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T15:40:55.521,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [error_logger:info,2014-08-19T15:40:55.522,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.319.0>}, {name,vbucket_map_mirror}, {mfa,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [ns_server:debug,2014-08-19T15:40:55.522,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: uuid -> <<"9032e293d656a8b04683554c561fe06f">> [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined 
[ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T15:40:55.523,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T15:40:55.524,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T15:40:55.524,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T15:40:55.524,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T15:40:55.524,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, 
{"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T15:40:55.524,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T15:40:55.525,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T15:40:55.525,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T15:40:55.525,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T15:40:55.525,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T15:40:55.546,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:40:55.547,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T15:40:55.547,ns_1@127.0.0.1:<0.316.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T15:40:55.547,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.321.0>}, {name,bucket_info_cache}, {mfa,{bucket_info_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.547,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T15:40:55.547,ns_1@127.0.0.1:<0.316.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T15:40:55.547,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.324.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.547,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.325.0>}, {name,buckets_events}, {mfa,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.548,ns_1@127.0.0.1:ns_log_events<0.304.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2014-08-19T15:40:55.548,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.327.0>}, {name,ns_mail_log}, 
{mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.548,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.326.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.548,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.328.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.549,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.329.0>}, {name,samples_loader_tasks}, {mfa,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.571,ns_1@127.0.0.1:ns_cookie_manager<0.277.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T15:40:55.571,ns_1@127.0.0.1:<0.317.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T15:40:55.572,ns_1@127.0.0.1:<0.317.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T15:40:55.573,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.330.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.575,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.334.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.576,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.332.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:info,2014-08-19T15:40:55.580,ns_1@127.0.0.1:remote_clusters_info<0.337.0>:remote_clusters_info:read_or_create_table:540]Reading remote_clusters_info content from /opt/couchbase/var/lib/couchbase/remote_clusters_cache_v3 [error_logger:info,2014-08-19T15:40:55.582,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.337.0>}, {name,remote_clusters_info}, {mfa,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T15:40:55.583,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.340.0>}, {name,master_activity_events}, {mfa, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.583,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.332.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2014-08-19T15:40:55.585,ns_1@127.0.0.1:ns_server_sup<0.290.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] [ns_server:debug,2014-08-19T15:40:55.585,ns_1@127.0.0.1:ns_server_sup<0.290.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [ns_server:debug,2014-08-19T15:40:55.585,ns_1@127.0.0.1:ns_server_sup<0.290.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [user:info,2014-08-19T15:40:55.587,ns_1@127.0.0.1:mb_master<0.343.0>:mb_master:init:86]I'm the only node, so I'm the master. [ns_server:debug,2014-08-19T15:40:55.591,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.332.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T15:40:55.592,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.332.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}]} [ns_server:debug,2014-08-19T15:40:55.598,ns_1@127.0.0.1:ns_config_log<0.285.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T15:40:55.598,ns_1@127.0.0.1:ns_config_rep<0.313.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) 
[ns_server:debug,2014-08-19T15:40:55.599,ns_1@127.0.0.1:mb_master_sup<0.349.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.350.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:40:55.599,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.350.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.600,ns_1@127.0.0.1:mb_master_sup<0.349.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.353.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:40:55.600,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.353.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.603,ns_1@127.0.0.1:<0.354.0>:auto_failover:init:134]init auto_failover. [ns_server:debug,2014-08-19T15:40:55.603,ns_1@127.0.0.1:mb_master_sup<0.349.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.354.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T15:40:55.603,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.354.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.603,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.343.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.604,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.355.0>}, {name,master_activity_events_ingress}, {mfa, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.604,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.356.0>}, {name,master_activity_events_timestamper}, {mfa, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.622,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.357.0>}, {name,master_activity_events_pids_watcher}, {mfa, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.639,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.358.0>}, {name,master_activity_events_keeper}, {mfa,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.672,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.362.0>}, {name,ns_ssl_services_setup}, {mfargs,{ns_ssl_services_setup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.674,ns_1@127.0.0.1:ns_ssl_services_setup<0.362.0>:ns_ssl_services_setup:restart_xdcr_proxy:201]Xdcr proxy restart failed. But that's usually normal. {'EXIT', {{badmatch, {badrpc, {'EXIT', {{case_clause, false}, [{ns_child_ports_sup, restart_port_by_name, 1}, {rpc, '-handle_call_call/6-fun-0-', 5}]}}}}, [{ns_ports_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, restart_xdcr_proxy, 0}, {ns_ssl_services_setup, init,1}, {gen_server,init_it, 6}, {proc_lib, init_p_do_apply, 3}]}} [error_logger:info,2014-08-19T15:40:55.694,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.364.0>}, {name,ns_rest_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_rest_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.695,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.381.0>}, {name,ns_capi_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_capi_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.696,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.361.0>}, {name,ns_ssl_services_sup}, {mfargs,{ns_ssl_services_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.696,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.398.0>}, {name,menelaus_ui_auth}, {mfargs,{menelaus_ui_auth,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.697,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.399.0>}, {name,menelaus_web_cache}, {mfargs,{menelaus_web_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.698,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.400.0>}, {name,menelaus_stats_gatherer}, {mfargs,{menelaus_stats_gatherer,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, 
{child_type,worker}] [error_logger:info,2014-08-19T15:40:55.699,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.401.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.700,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.418.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.700,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.419.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.420.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2014-08-19T15:40:55.704,ns_1@127.0.0.1:ns_server_sup<0.290.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. [error_logger:info,2014-08-19T15:40:55.704,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.360.0>}, {name,menelaus}, {mfa,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.706,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.422.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.706,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.423.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [ns_server:info,2014-08-19T15:40:55.707,ns_1@127.0.0.1:<0.424.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2014-08-19T15:40:55.707,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.424.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.707,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: 
{local,ns_server_sup} started: [{pid,<0.421.0>}, {name,mc_sup}, {mfa,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.707,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.425.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.708,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.426.0>}, {name,ns_port_memcached_killer}, {mfa,{ns_ports_setup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:55.708,ns_1@127.0.0.1:<0.428.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/opt/couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2014-08-19T15:40:55.709,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.428.0>}, {name,ns_memcached_log_rotator}, {mfa,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.712,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.430.0>}, {name,memcached_clients_pool}, {mfa,{memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.714,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.431.0>}, {name,proxied_memcached_clients_pool}, {mfa,{proxied_memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.714,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.432.0>}, {name,xdc_lhttpc_pool}, {mfa, {lhttpc_manager,start_link, [[{name,xdc_lhttpc_pool}, {connection_timeout,120000}, {pool_size,200}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.714,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.433.0>}, {name,ns_null_connection_pool}, {mfa, {ns_null_connection_pool,start_link, [ns_null_connection_pool]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.715,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.434.0>}, {name,xdc_replication_sup}, {mfa,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, 
{shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.716,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.435.0>}, {name,xdc_rep_manager}, {mfa,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.717,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.437.0>}, {name,ns_memcached_sockets_pool}, {mfa,{ns_memcached_sockets_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.719,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.440.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.720,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.442.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.720,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.441.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.721,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.439.0>}, {name,ns_bucket_worker_sup}, {mfa,{ns_bucket_worker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.721,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.443.0>}, {name,system_stats_collector}, {mfa,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.722,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.446.0>}, {name,{stats_archiver,"@system"}}, {mfa,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T15:40:55.722,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.448.0>}, {name,{stats_reader,"@system"}}, {mfa,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, 
{child_type,worker}] [error_logger:info,2014-08-19T15:40:55.727,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.449.0>}, {name,compaction_daemon}, {mfa,{compaction_daemon,start_link,[]}}, {restart_type,{permanent,4}}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.728,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:40:55.728,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:40:55.732,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.451.0>:xdc_rdoc_replication_srv:init:76]Loaded the following docs: [] [ns_server:debug,2014-08-19T15:40:55.732,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.451.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T15:40:55.732,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.451.0>}, {name,xdc_rdoc_replication_srv}, {mfa,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T15:40:55.733,ns_1@127.0.0.1:set_view_update_daemon<0.453.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2014-08-19T15:40:55.733,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.453.0>}, {name,set_view_update_daemon}, {mfa,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T15:40:55.733,ns_1@127.0.0.1:<0.2.0>:child_erlang:child_loop:104]Entered child_loop [error_logger:info,2014-08-19T15:40:55.733,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.290.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T15:40:55.733,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: ns_server started_at: 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T15:41:25.729,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:41:25.729,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:41:55.730,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:41:55.730,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:42:25.731,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:42:25.731,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:42:55.732,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:42:55.732,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:43:25.733,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:43:25.733,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:43:55.734,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:43:55.734,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:44:25.735,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:44:25.735,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:44:55.736,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:44:55.736,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:45:25.737,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:45:25.737,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:45:55.738,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:45:55.739,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:46:25.741,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:46:25.741,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:46:55.742,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:46:55.743,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:47:25.744,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:47:25.745,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:47:55.746,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:47:55.746,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:48:25.747,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:48:25.747,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:48:55.748,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:48:55.748,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:49:25.749,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:49:25.749,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:49:55.750,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:49:55.750,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:50:25.751,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:50:25.751,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:50:55.752,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:50:55.752,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:51:25.753,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:51:25.753,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:51:55.754,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:51:55.754,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:52:25.755,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:52:25.755,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:52:55.756,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:52:55.756,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:53:25.757,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:53:25.757,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:53:55.758,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:53:55.758,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:54:25.759,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:54:25.759,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:54:55.782,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:54:55.782,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:55:25.783,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:55:25.783,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:55:55.784,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:55:55.784,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:56:25.785,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:56:25.785,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:56:55.788,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:56:55.788,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T15:57:25.789,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:57:25.789,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:57:55.790,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:57:55.790,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:58:25.791,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:58:25.791,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:58:55.792,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:58:55.792,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:59:25.793,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:59:25.793,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T15:59:55.794,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T15:59:55.794,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:00:25.795,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:00:25.795,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:00:55.796,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:00:55.796,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:01:25.797,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:01:25.797,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:01:55.798,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:01:55.798,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:02:25.799,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:02:25.799,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:02:55.806,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:02:55.806,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:03:25.808,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:03:25.808,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:03:55.809,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:03:55.809,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:04:25.810,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:04:25.810,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:04:55.811,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:04:55.811,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:05:25.812,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:05:25.812,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:05:55.813,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:05:55.813,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:06:25.814,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:06:25.814,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:06:55.815,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:06:55.815,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:07:25.816,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:07:25.816,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:07:55.817,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:07:55.817,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:08:25.818,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:08:25.818,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:08:55.819,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:08:55.819,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:09:25.820,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:09:25.820,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:09:55.821,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:09:55.821,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:10:25.822,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:10:25.822,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:10:55.823,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:10:55.823,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:11:25.824,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:11:25.824,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:11:55.825,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:11:55.825,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:12:25.826,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:12:25.826,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:12:55.827,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:12:55.827,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:13:25.828,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:13:25.828,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:13:55.829,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:13:55.829,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:14:25.830,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:14:25.830,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:14:55.831,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:14:55.831,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:15:25.832,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:15:25.832,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:15:55.833,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:15:55.833,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:16:25.834,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:16:25.834,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:16:55.835,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:16:55.836,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:17:25.837,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:17:25.837,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:17:55.838,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:17:55.838,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:18:25.839,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:18:25.839,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:18:55.840,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:18:55.840,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:19:25.841,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:19:25.841,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:19:55.842,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:19:55.842,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:20:25.843,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:20:25.843,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:20:55.844,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:20:55.844,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:21:25.845,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:21:25.845,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:21:55.846,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:21:55.846,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:22:25.847,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:22:25.847,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:22:55.848,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:22:55.848,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:23:25.849,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:23:25.849,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:23:55.850,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:23:55.850,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:24:25.851,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:24:25.851,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:24:55.852,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:24:55.852,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:25:25.853,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:25:25.853,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:25:55.854,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:25:55.854,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:26:25.855,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:26:25.855,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:26:55.856,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:26:55.856,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:27:25.857,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:27:25.857,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:27:55.858,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:27:55.858,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:28:25.859,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:28:25.859,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:28:55.860,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:28:55.860,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:29:25.861,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:29:25.861,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:29:55.862,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:29:55.862,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:30:25.863,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:30:25.863,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:30:55.864,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:30:55.865,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:31:25.866,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:31:25.866,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:31:55.867,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:31:55.867,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:32:25.868,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:32:25.868,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:32:55.869,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:32:55.869,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:33:25.870,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:33:25.870,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:33:55.871,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:33:55.872,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:34:25.873,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:34:25.873,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:34:55.874,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:34:55.874,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:35:25.875,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:35:25.875,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:35:55.876,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:35:55.876,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:36:25.877,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:36:25.877,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:36:55.878,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:36:55.878,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:37:25.879,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:37:25.879,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:37:55.880,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:37:55.880,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:38:25.881,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:38:25.881,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:38:55.882,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:38:55.882,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:39:25.883,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:39:25.883,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:39:55.884,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:39:55.884,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:40:25.885,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:40:25.885,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:40:55.886,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:40:55.886,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:41:25.887,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:41:25.887,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:41:55.888,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:41:55.888,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:42:25.889,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:42:25.889,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:42:55.890,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:42:55.890,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:43:25.891,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:43:25.891,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:43:55.892,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:43:55.892,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:44:25.893,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:44:25.893,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:44:55.894,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:44:55.894,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:45:25.895,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:45:25.895,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:45:55.896,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:45:55.896,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:46:25.897,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:46:25.897,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:46:55.898,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:46:55.898,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s
[ns_server:debug,2014-08-19T16:47:25.899,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction.
[ns_server:debug,2014-08-19T16:47:25.899,ns_1@127.0.0.1:compaction_daemon<0.449.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s
[user:info,2014-08-19T16:47:52.487,ns_1@127.0.0.1:<0.11605.0>:ns_storage_conf:setup_disk_storage_conf:116]Setting database directory path to /var/lib/pgsql and index directory path to /var/lib/pgsql
[ns_server:info,2014-08-19T16:47:52.488,ns_1@127.0.0.1:<0.11605.0>:ns_storage_conf:setup_disk_storage_conf:124]Removing all the buckets because database path has changed (old database path /opt/couchbase/var/lib/couchbase/data)
[ns_server:info,2014-08-19T16:47:52.488,ns_1@127.0.0.1:<0.11605.0>:ns_storage_conf:setup_disk_storage_conf:130]Removing all unused database files
[ns_server:debug,2014-08-19T16:47:52.495,ns_1@127.0.0.1:<0.454.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.453.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.495,ns_1@127.0.0.1:<0.450.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.449.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.778,ns_1@127.0.0.1:<0.447.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_stats_event,<0.446.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.778,ns_1@127.0.0.1:<0.445.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_tick_event,<0.443.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.778,ns_1@127.0.0.1:<0.442.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.441.0>} exited with reason shutdown
[error_logger:error,2014-08-19T16:47:52.779,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115]
=========================SUPERVISOR REPORT=========================
 Supervisor: {local,ns_bucket_sup}
 Context:    shutdown_error
 Reason:     normal
 Offender:   [{pid,<0.442.0>},
              {name,buckets_observing_subscription},
              {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}},
              {restart_type,permanent},
              {shutdown,1000},
              {child_type,worker}]
[ns_server:debug,2014-08-19T16:47:52.779,ns_1@127.0.0.1:<0.429.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.426.0>} exited with reason killed
[ns_server:debug,2014-08-19T16:47:52.779,ns_1@127.0.0.1:<0.427.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.425.0>} exited with reason killed
[ns_server:debug,2014-08-19T16:47:52.780,ns_1@127.0.0.1:<0.363.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.362.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.780,ns_1@127.0.0.1:<0.359.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {master_activity_events,<0.358.0>} exited with reason killed
[ns_server:info,2014-08-19T16:47:52.780,ns_1@127.0.0.1:mb_master<0.343.0>:mb_master:terminate:299]Synchronously shutting down child mb_master_sup
[ns_server:debug,2014-08-19T16:47:52.780,ns_1@127.0.0.1:<0.331.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {buckets_events,<0.330.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.780,ns_1@127.0.0.1:<0.344.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.343.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.781,ns_1@127.0.0.1:<0.335.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.334.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.781,ns_1@127.0.0.1:<0.320.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.319.0>} exited with reason killed
[ns_server:debug,2014-08-19T16:47:52.781,ns_1@127.0.0.1:<0.323.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.321.0>} exited with reason killed
[ns_server:debug,2014-08-19T16:47:52.781,ns_1@127.0.0.1:<0.314.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events_local,<0.313.0>} exited with reason shutdown
[ns_server:debug,2014-08-19T16:47:52.781,ns_1@127.0.0.1:<0.298.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.297.0>} exited with reason killed
[ns_server:debug,2014-08-19T16:47:52.781,ns_1@127.0.0.1:<0.302.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.300.0>} exited with reason shutdown
[error_logger:error,2014-08-19T16:47:52.783,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115]
=========================CRASH REPORT=========================
  crasher:
    initial call: gen_event:init_it/6
    pid: <0.322.0>
    registered_name: bucket_info_cache_invalidations
    exception exit: killed
      in function  gen_event:terminate_server/4
    ancestors: [bucket_info_cache,ns_server_sup,ns_server_cluster_sup,<0.58.0>]
    messages: []
    links: []
    dictionary: []
    trap_exit: true
    status: running
    heap_size: 233
    stack_size: 24
    reductions: 119
  neighbours:
[ns_server:debug,2014-08-19T16:47:52.884,ns_1@127.0.0.1:ns_config<0.282.0>:ns_config:wait_saver:652]Done waiting for saver.
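The repeated ns_pubsub:do_subscribe_link messages above and below record gen_event subscriptions being torn down as their owning processes exit during this shutdown. The actual ns_pubsub module is not part of this log; the following is only a minimal, self-contained sketch of what such a subscribe-link helper could look like in Erlang/OTP, with illustrative names (pubsub_link_sketch, subscribe_link/3 and the Callback fun are invented for this example): a linked middleman process installs a gen_event handler on behalf of its parent, and removes it, logging the reason, when the parent dies.

%% pubsub_link_sketch.erl -- assumed sketch, not the real ns_pubsub code.
-module(pubsub_link_sketch).
-behaviour(gen_event).

-export([subscribe_link/3]).
-export([init/1, handle_event/2, handle_call/2, handle_info/2,
         terminate/2, code_change/3]).

%% Called by the parent process; returns the pid of the linked middleman.
subscribe_link(EventMgr, Callback, Parent) ->
    spawn_link(fun () ->
                       MRef = erlang:monitor(process, Parent),
                       ok = gen_event:add_handler(EventMgr, ?MODULE, Callback),
                       receive
                           {'DOWN', MRef, process, Parent, Reason} ->
                               error_logger:info_msg(
                                 "Parent process of subscription ~p "
                                 "exited with reason ~p~n",
                                 [{EventMgr, Parent}, Reason]),
                               gen_event:delete_handler(EventMgr, ?MODULE, shutdown)
                       end
               end).

%% gen_event callbacks: the handler state is just the callback fun.
init(Callback) -> {ok, Callback}.
handle_event(Event, Callback) -> Callback(Event), {ok, Callback}.
handle_call(_Request, Callback) -> {ok, ok, Callback}.
handle_info(_Info, Callback) -> {ok, Callback}.
terminate(_Reason, _Callback) -> ok.
code_change(_OldVsn, Callback, _Extra) -> {ok, Callback}.

Calling pubsub_link_sketch:subscribe_link(ns_config_events, fun(E) -> io:format("~p~n", [E]) end, self()) from a gen_server would, when that gen_server is later killed or shut down, log a "Parent process of subscription ... exited with reason ..." line matching the pattern of the entries in this log.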
[ns_server:debug,2014-08-19T16:47:52.884,ns_1@127.0.0.1:<0.288.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.287.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:47:52.884,ns_1@127.0.0.1:<0.286.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.285.0>} exited with reason shutdown [error_logger:error,2014-08-19T16:47:52.884,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================SUPERVISOR REPORT========================= Supervisor: {local,ns_server_cluster_sup} Context: shutdown_error Reason: killed Offender: [{pid,<0.289.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:error,2014-08-19T16:47:52.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_file:spawn_writer/2 pid: <0.214.0> registered_name: [] exception exit: {noproc, {gen_server,call, [couch_file_write_guard, {remove,<0.214.0>}, infinity]}} in function gen_server:call/3 in call from couch_file:writer_loop/4 ancestors: [<0.212.0>,couch_server,couch_primary_services, couch_server_sup,cb_couch_sup,ns_server_cluster_sup, <0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 377 stack_size: 24 reductions: 651 neighbours: [error_logger:error,2014-08-19T16:47:52.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.215.0> terminating ** Last message in was {'EXIT',<0.211.0>,killed} ** When Server state == {db,<0.215.0>,<0.216.0>,nil,<<"1408448454194506">>, <0.212.0>,<0.217.0>, {db_header,11,0,nil,nil,nil,0,nil,nil}, 0, {btree,<0.212.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.212.0>,nil, #Fun, #Fun, #Fun, #Fun,1279, 2558,true}, {btree,<0.212.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 0,<<"_replicator">>, "/opt/couchbase/var/lib/couchbase/data/_replicator.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2014-08-19T16:47:52.886,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_msg:119]** Generic server <0.221.0> terminating ** Last message in was {'EXIT',<0.211.0>,killed} ** When Server state == {db,<0.221.0>,<0.222.0>,nil,<<"1408448454200635">>, <0.218.0>,<0.223.0>, {db_header,11,1, <<0,0,0,0,13,103,0,0,0,0,0,51,0,0,0,0,1,0,0,0, 0,0,0,0,0,0,13,69>>, <<0,0,0,0,13,154,0,0,0,0,0,49,0,0,0,0,1>>, nil,0,nil,nil}, 1, {btree,<0.218.0>, {3431, <<0,0,0,0,1,0,0,0,0,0,0,0,0,0,13,69>>, 51}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.218.0>, {3482,<<0,0,0,0,1>>,49}, #Fun, #Fun, #Fun, #Fun,1279,2558, true}, {btree,<0.218.0>,nil,identity,identity, #Fun,nil,1279,2558, true}, 1,<<"_users">>, "/opt/couchbase/var/lib/couchbase/data/_users.couch.1", [],nil, {user_ctx,null,[],undefined}, nil, [before_header,after_header,on_file_open], []} ** Reason for termination == ** killed [error_logger:error,2014-08-19T16:47:52.887,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.215.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: 
[couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 249 neighbours: [error_logger:error,2014-08-19T16:47:52.887,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: couch_db:init/1 pid: <0.221.0> registered_name: [] exception exit: killed in function gen_server:terminate/6 ancestors: [couch_server,couch_primary_services,couch_server_sup, cb_couch_sup,ns_server_cluster_sup,<0.58.0>] messages: [] links: [] dictionary: [] trap_exit: true status: running heap_size: 610 stack_size: 24 reductions: 214 neighbours: [error_logger:info,2014-08-19T16:47:52.887,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: mapreduce exited: stopped type: temporary [error_logger:info,2014-08-19T16:47:52.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: couch_view_parser exited: stopped type: temporary [error_logger:info,2014-08-19T16:47:52.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: couch_index_merger exited: stopped type: temporary [error_logger:info,2014-08-19T16:47:52.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================INFO REPORT========================= application: couch_set_view exited: stopped type: temporary [error_logger:info,2014-08-19T16:47:52.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_view_parser started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:47:52.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_set_view started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:47:52.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: couch_index_merger started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:47:52.888,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= application: mapreduce started_at: 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:47:52.913,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.11675.0>}, {name,couch_config}, {mfargs, {couch_server_sup,couch_config_start_link_wrapper, [["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/default.d/capi.ini", "/opt/couchbase/etc/couchdb/default.d/geocouch.ini", "/opt/couchbase/etc/couchdb/local.ini"], <0.11675.0>]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:52.920,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} 
started: [{pid,<0.11678.0>}, {name,collation_driver}, {mfargs,{couch_drv,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:52.920,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.11679.0>}, {name,couch_task_events}, {mfargs, {gen_event,start_link,[{local,couch_task_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:52.920,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.11680.0>}, {name,couch_task_status}, {mfargs,{couch_task_status,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:52.920,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.11681.0>}, {name,couch_file_write_guard}, {mfargs,{couch_file_write_guard,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.310,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.11682.0>}, {name,couch_server}, {mfargs,{couch_server,sup_start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.310,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17851.0>}, {name,couch_db_update_event}, {mfargs, {gen_event,start_link,[{local,couch_db_update}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.311,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17852.0>}, {name,couch_replication_event}, {mfargs, {gen_event,start_link,[{local,couch_replication}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.311,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17853.0>}, {name,couch_replication_supervisor}, {mfargs,{couch_rep_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.311,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17854.0>}, {name,couch_log}, {mfargs,{couch_log,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.311,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17855.0>}, {name,couch_main_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_main_index_barrier, "max_parallel_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.311,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17856.0>}, {name,couch_replica_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_replica_index_barrier, "max_parallel_replica_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.312,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_primary_services} started: [{pid,<0.17857.0>}, {name,couch_spatial_index_barrier}, {mfargs, {couch_index_barrier,start_link, [couch_spatial_index_barrier, "max_parallel_spatial_indexers"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.312,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.11677.0>}, {name,couch_primary_services}, {mfargs,{couch_primary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.312,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17859.0>}, {name,couch_db_update_notifier_sup}, {mfargs,{couch_db_update_notifier_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.312,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17860.0>}, {name,auth_cache}, {mfargs,{couch_auth_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17862.0>}, {name,set_view_manager}, {mfargs,{couch_set_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17864.0>}, {name,spatial_manager}, {mfargs,{couch_spatial,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17866.0>}, {name,index_merger_pool}, {mfargs, 
{lhttpc_manager,start_link, [[{connection_timeout,90000}, {pool_size,10000}, {name,couch_index_merger_connection_pool}]]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17867.0>}, {name,query_servers}, {mfargs,{couch_query_servers,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17869.0>}, {name,couch_set_view_ddoc_cache}, {mfargs,{couch_set_view_ddoc_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.313,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17871.0>}, {name,view_manager}, {mfargs,{couch_view,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17873.0>}, {name,httpd}, {mfargs,{couch_httpd,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_secondary_services} started: [{pid,<0.17890.0>}, {name,uuids}, {mfargs,{couch_uuids,start,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,couch_server_sup} started: [{pid,<0.17858.0>}, {name,couch_secondary_services}, {mfargs,{couch_secondary_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.315,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,cb_couch_sup} started: [{pid,<0.11676.0>}, {name,couch_app}, {mfargs, {couch_app,start, [fake, ["/opt/couchbase/etc/couchdb/default.ini", "/opt/couchbase/etc/couchdb/local.ini"]]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:47:56.319,ns_1@127.0.0.1:ns_server_cluster_sup<0.161.0>:log_os_info:start_link:25]OS type: {unix,linux} Version: {2,6,32} Runtime info: [{otp_release,"R14B04"}, {erl_version,"5.8.5"}, {erl_version_long, "Erlang R14B04 (erts-5.8.5) [source] [64-bit] [smp:24:24] [rq:24] [async-threads:16] [kernel-poll:true]\n"}, {system_arch_raw,"x86_64-unknown-linux-gnu"}, {system_arch,"x86_64-unknown-linux-gnu"}, {localtime,{{2014,8,19},{16,47,56}}}, {memory, [{total,601336592}, {processes,39634848}, 
{processes_used,39627880}, {system,561701744}, {atom,1157217}, {atom_used,1136504}, {binary,341768}, {code,10898757}, {ets,2278496}]}, {loaded, [lib,capi_utils,stats_collector, menelaus_web_remote_clusters,mb_grid,ejson, mochiweb_response,menelaus_web_buckets,menelaus_auth, mochiweb_util,mochiweb_request,mochiweb_headers, set_view_update_daemon,xdc_rdoc_replication_srv, compaction_daemon,stats_archiver,ns_bucket_sup, ns_bucket_worker_sup,couch_changes, ns_memcached_sockets_pool,xdc_rep_manager, ns_null_connection_pool,proxied_memcached_clients_pool, ns_moxi_sup,ns_connection_pool,memcached_clients_pool, ns_cluster_membership,ns_memcached_log_rotator, mc_tcp_listener,mc_conn_sup,mc_sup, menelaus_web_alerts_srv,hot_keys_keeper,menelaus_event, menelaus_stats_gatherer,menelaus_web_cache, menelaus_ui_auth,ssl_tls1,ssl_cipher,ssl_record,mochiweb, menelaus_util,menelaus_web,ns_ports_setup,ns_server_cert, ns_ssl_services_setup,ns_ssl_services_sup,menelaus_sup, ringbuffer,master_activity_events_keeper, master_activity_events_pids_watcher,auto_failover,ns_tick, ns_online_config_upgrader,ns_orchestrator, master_activity_events,system_stats_collector, mb_master_sup,failover_safeness_level,gen_fsm,mb_master, xdc_replication_sup,ns_bucket,remote_clusters_info, stats_reader,ns_doctor,ns_heart,samples_loader_tasks, ns_mail_log,ns_mail_sup,bucket_info_cache, vbucket_map_mirror,ns_node_disco_rep_events,ns_config_rep, ns_node_disco_conf_events,ns_node_disco_log,net_adm, cluster_compat_mode,ns_node_disco,ns_node_disco_sup, ns_memcached,dist_util,ns_config_isasl_sync,ns_crash_log, ns_config_ets_dup,random,timer2,ns_log,request_throttler, menelaus_deps,dir_size,work_queue,supervisor2, ns_server_sup,ns_process_registry,cb_config_couch_sync, ns_pubsub,ns_config_replica,ns_config_log,vclock, ns_storage_conf,ns_config_default,ns_config,ns_config_sup, ns_cluster,ns_cookie_manager,erl_epmd,inet_tcp_dist, gen_udp,dist_manager,timeout_diag_logger,path_config, diag_handler,auth,ns_info,log_os_info,couch_config_writer, cb_init_loggers,couch_uuids,mochiweb_acceptor,inet_tcp, gen_tcp,mochiweb_socket,mochiweb_socket_server,mochilists, mochiweb_http,eval_bits,couch_httpd,couch_view, couch_set_view_ddoc_cache,couch_query_servers, couch_spatial,mapreduce,couch_set_view, couch_db_update_notifier,snappy,couch_compress, couch_auth_cache,couch_db_update_notifier_sup, couch_secondary_sup,queue,couch_index_barrier, couch_event_sup,couch_log,couch_rep_sup,couch_btree, couch_ref_counter,couch_db_updater,couch_db,httpd_util, filelib,couch_file,couch_file_write_guard, couch_task_status,erl_ddll,couch_drv,couch_primary_sup, couch_server,string,re,file2,couch_util,couch_config, couch_server_sup,ssl_server,crypto,ssl,lhttpc_manager, lhttpc_sup,lhttpc,ssl_connection_sup,ssl_session_cache, ssl_certificate_db,ssl_manager,ssl_broker_sup,ssl_sup, ssl_app,tftp_sup,httpd_sup,httpc_handler_sup,httpc_cookie, inets,httpc_manager,httpc,httpc_profile_sup,httpc_sup, ftp_sup,inets_sup,inets_app,crypto_server,crypto_sup, crypto_app,couch_app,cb_couch_sup,ns_server_cluster_sup, mlockall,calendar,ale_default_formatter,otp_internal,misc, 'ale_logger-xdcr','ale_logger-mapreduce_errors', 'ale_logger-views',timer,io_lib_fread, 'ale_logger-cluster','ale_logger-rebalance', 'ale_logger-stats','ale_logger-ns_doctor', 'ale_logger-menelaus','ale_logger-user', 'ale_logger-ns_server','ale_logger-couchdb',ns_log_sink, disk_log_sup,disk_log_server,disk_log_1,disk_log, ale_disk_sink,ns_server,cpu_sup,memsup,disksup,os_mon,io, 
release_handler,overload,alarm_handler,log_mf_h,sasl, ale_error_logger_handler,'ale_logger-ale_logger', 'ale_logger-error_logger',beam_opcodes,beam_dict,beam_asm, beam_validator,beam_flatten,beam_trim,beam_receive, beam_bsm,beam_peep,beam_dead,beam_type,beam_bool, beam_clean,beam_utils,beam_jump,beam_block,v3_codegen, v3_life,v3_kernel,sys_core_dsetel,erl_bifs,sys_core_fold, cerl_trees,sys_core_inline,core_lib,cerl,v3_core,erl_bits, erl_expand_records,sys_pre_expand,sofs,erl_internal,sets, ordsets,erl_lint,compile,dynamic_compile,ale_utils, io_lib_pretty,io_lib_format,io_lib,ale_codegen,dict,ale, ale_dynamic_sup,ale_sup,ale_app,ns_bootstrap,child_erlang, file_io_server,orddict,erl_eval,file,c,kernel_config, user_sup,supervisor_bridge,standard_error,unicode,binary, ets,gb_sets,hipe_unified_loader,packages,code_server,code, file_server,net_kernel,global_group,erl_distribution, filename,inet_gethost_native,os,inet_parse,inet,inet_udp, inet_config,inet_db,global,gb_trees,rpc,supervisor,kernel, application_master,sys,application,gen_server,erl_parse, proplists,erl_scan,lists,application_controller,proc_lib, gen,gen_event,error_logger,heart,error_handler,erlang, erl_prim_loader,prim_zip,zlib,prim_file,prim_inet,init, otp_ring0]}, {applications, [{public_key,"Public key infrastructure","0.13"}, {asn1,"The Erlang ASN1 compiler version 1.6.18","1.6.18"}, {lhttpc,"Lightweight HTTP Client","1.3.0"}, {ale,"Another Logger for Erlang","8ca6d2a"}, {os_mon,"CPO CXC 138 46","2.2.7"}, {couch_set_view,"Set views","1.2.0a-a425d97-git"}, {compiler,"ERTS CXC 138 10","4.7.5"}, {inets,"INETS CXC 138 49","5.7.1"}, {couch,"Apache CouchDB","1.2.0a-a425d97-git"}, {mapreduce,"MapReduce using V8 JavaScript engine","1.0.0"}, {couch_index_merger,"Index merger","1.2.0a-a425d97-git"}, {kernel,"ERTS CXC 138 10","2.14.5"}, {crypto,"CRYPTO version 2","2.0.4"}, {ssl,"Erlang/OTP SSL application","4.1.6"}, {sasl,"SASL CXC 138 11","2.1.10"}, {couch_view_parser,"Couch view parser","1.0.0"}, {ns_server,"Couchbase server","2.5.1-1083-rel-enterprise"}, {mochiweb,"MochiMedia Web Server","2.4.2"}, {syntax_tools,"Syntax tools","1.6.7.1"}, {xmerl,"XML parser","1.2.10"}, {oauth,"Erlang OAuth implementation","7d85d3ef"}, {stdlib,"ERTS CXC 138 10","1.17.5"}]}, {pre_loaded, [erlang,erl_prim_loader,prim_zip,zlib,prim_file,prim_inet, init,otp_ring0]}, {process_count,6319}, {node,'ns_1@127.0.0.1'}, {nodes,[]}, {registered, [disk_log_sup,disk_log_server,httpc_sup,ssl_broker_sup, code_server,httpc_profile_sup,couch_set_view_ddoc_cache, httpc_manager,ssl_server,inet_gethost_native_sup, httpc_handler_sup,ssl_sup,application_controller, couch_index_merger_connection_pool,ftp_sup,couch_spatial, standard_error_sup,inets_sup,crypto_server,crypto_sup, couch_secondary_services,couch_primary_services, couch_db_update,couch_config,error_logger,couch_server, couch_uuids,'sink-disk_default',os_mon_sup,cpu_sup,memsup, disksup,kernel_safe_sup,auth,couch_db_update_notifier_sup, dist_manager,couch_log,couch_auth_cache,couch_rep_sup, sasl_safe_sup,couch_view,couch_server_sup,cb_couch_sup, timer_server,couch_query_servers,couch_task_status, couch_httpd,couch_drv,rex,couch_file_write_guard,net_sup, kernel_sup,global_name_server,sasl_sup,net_kernel, file_server_2,release_handler,overload,alarm_handler, ale_sup,ale_dynamic_sup,lhttpc_sup,ale, couch_spatial_index_barrier,couch_replica_index_barrier, couch_main_index_barrier,couch_set_view,couch_replication, couch_task_events,lhttpc_manager,timer2_server,tftp_sup, ns_server_cluster_sup,standard_error,erl_prim_loader, 
inet_gethost_native,init,inet_db,httpd_sup,'sink-ns_log', 'sink-disk_stats','sink-disk_xdcr_errors', 'sink-disk_xdcr','sink-disk_debug','sink-disk_couchdb', 'sink-disk_mapreduce_errors','sink-disk_views', global_group,'sink-disk_error',ssl_connection_sup, ssl_manager,erl_epmd]}, {cookie,xyzevwdfypcplvpp}, {wordsize,8}, {wall_clock,4023}] [ns_server:info,2014-08-19T16:47:56.321,ns_1@127.0.0.1:ns_server_cluster_sup<0.161.0>:log_os_info:start_link:27]Manifest: ["","", " ", " ", " ", " ", " ", " ", " "," ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " ", " "," "] [error_logger:info,2014-08-19T16:47:56.322,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17892.0>}, {name,timeout_diag_logger}, {mfargs,{timeout_diag_logger,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:47:56.322,ns_1@127.0.0.1:ns_config_sup<0.17895.0>:ns_config_sup:init:32]loading static ns_config from "/opt/couchbase/etc/couchbase/config" [error_logger:info,2014-08-19T16:47:56.322,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17893.0>}, {name,ns_cookie_manager}, {mfargs,{ns_cookie_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.322,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17894.0>}, {name,ns_cluster}, {mfargs,{ns_cluster,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.322,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17896.0>}, {name,ns_config_events}, {mfargs, {gen_event,start_link,[{local,ns_config_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.322,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17897.0>}, {name,ns_config_events_local}, {mfargs, {gen_event,start_link, [{local,ns_config_events_local}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:47:56.323,ns_1@127.0.0.1:ns_config<0.17898.0>:ns_config:load_config:795]Loading static config from "/opt/couchbase/etc/couchbase/config" [ns_server:info,2014-08-19T16:47:56.323,ns_1@127.0.0.1:ns_config<0.17898.0>:ns_config:load_config:809]Loading dynamic config from "/opt/couchbase/var/lib/couchbase/config/config.dat" [ns_server:debug,2014-08-19T16:47:56.324,ns_1@127.0.0.1:ns_config<0.17898.0>:ns_config:load_config:816]Here's full dynamic config we loaded: [[{dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667447}}]},2,5]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, 
{autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {buckets,[{configs,[]}]}, {cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667449}}]}| {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLRqVU6TF0w4j56YEsiio7JKXdj2\nxytV+84pRrHIxtwy5wbd7M8d61WQqjR5dKc8onmDdDg57RBIgnZze7tOk53eX3Cn\n5/34jyitKxDkVEjMzHsSiBdZjTDegLqmKpxOAiokPEsHxn4XenZyacOM/gcJ6/j+\nfQIDAQABowIwADALBgkqhkiG9w0BAQUDggEBAJDQBZGy6r2NS8CERBUZ5W7ks0uh\npNwXgErwxU3srGRDoLKyN4d2VFo/xoQfmRFryX1M+aVwtfzbnTr0Z6AlRV19I2Zx\n8OCR6Zmk0gHdHbCAoDO8Yu78lbl+yv13SrjzrUpNXG67Zx6thc8Ea7KvCezSSsVM\ngJuJE2Chr0sADmqCfWPeJq/mujZKqDKKl06Myr++6XQ0oOekSnPfyCREcSSoHe9r\nX+ORwhpHb31vtNZc7cRRmfqBUKDmp8QDNj+539k03qyjxVhCGJQcOsPCFjWQBMJD\noyh2dK6J6E43Or43v2sEnMBtMcLl0UEymNnyxDCL2+tjlDn92nMM2fngk4Q=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]},2,5]}, {drop_request_memory_threshold_mib,undefined}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {index_aware_rebalance_disabled,false}, {max_bucket_count,10}, {memory_quota,58026}, {nodes_wanted,['ns_1@127.0.0.1']}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667446}}]}, {cookie,xyzevwdfypcplvpp}]}, {remote_clusters,[]}, {replication,[{enabled,true}]}, {replication_topology,star}, {rest,[{port,8091}]}, {rest_creds,[{creds,[]}]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667480}}]}| <<"9032e293d656a8b04683554c561fe06f">>]}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}]}, {{couchdb,max_parallel_indexers},4}, {{couchdb,max_parallel_replica_indexers},2}, {{request_limit,capi},undefined}, {{request_limit,rest},undefined}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667445}}]}|{2,3,0}]}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, 
"vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667445}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}]}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_rest_port},18091}]] [ns_server:info,2014-08-19T16:47:56.328,ns_1@127.0.0.1:ns_config<0.17898.0>:ns_config:load_config:827]Here's full dynamic config we loaded + static & default config: [{{node,'ns_1@127.0.0.1',ssl_rest_port},18091}, {{node,'ns_1@127.0.0.1',ssl_proxy_upstream_port},11215}, {{node,'ns_1@127.0.0.1',ssl_proxy_downstream_port},11214}, {{node,'ns_1@127.0.0.1',ssl_capi_port},18092}, {{node,'ns_1@127.0.0.1',rest},[{port,8091},{port_meta,global}]}, {{node,'ns_1@127.0.0.1',port_servers}, [{'_vclock',[{'ns_1@127.0.0.1',{3,63575667445}}]}, {moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", 
{"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B","binary", "-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env, [{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol,stream]}]}, {{node,'ns_1@127.0.0.1',ns_log}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {filename,"/opt/couchbase/var/lib/couchbase/ns_log"}]}, {{node,'ns_1@127.0.0.1',moxi},[{port,11211},{verbosity,[]}]}, {{node,'ns_1@127.0.0.1',memcached}, [{'_vclock', [{'ns_1@127.0.0.1',{3,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}]}, {{node,'ns_1@127.0.0.1',membership},active}, {{node,'ns_1@127.0.0.1',isasl}, [{'_vclock', [{'ns_1@127.0.0.1',{1,63575667445}}, {<<"d026346e5ade950537d992bcb312c471">>,{1,63575667445}}]}, {path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}]}, {{node,'ns_1@127.0.0.1',config_version}, [{'_vclock',[{'ns_1@127.0.0.1',{7,63575667445}}]}|{2,3,0}]}, {{node,'ns_1@127.0.0.1',compaction_daemon}, [{check_interval,30},{min_file_size,131072}]}, {{node,'ns_1@127.0.0.1',capi_port},8092}, {{request_limit,rest},undefined}, {{request_limit,capi},undefined}, {{couchdb,max_parallel_replica_indexers},2}, {{couchdb,max_parallel_indexers},4}, {vbucket_map_history,[{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}]}, {uuid, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667480}}]}| <<"9032e293d656a8b04683554c561fe06f">>]}, {set_view_update_daemon, [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}]}, {server_groups, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]}, [{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]]}, {rest_creds,[{creds,[]}]}, {rest,[{port,8091}]}, {replication_topology,star}, {replication,[{enabled,true}]}, {remote_clusters,[]}, {otp, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667446}}]}, {cookie,xyzevwdfypcplvpp}]}, {nodes_wanted,['ns_1@127.0.0.1']}, {memory_quota,58026}, {max_bucket_count,10}, {index_aware_rebalance_disabled,false}, {fast_warmup, [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}]}, {email_alerts, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, 
{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server, [{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts, [auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}]}, {drop_request_memory_threshold_mib,undefined}, {cluster_compat_version, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667447}}]},2,5]}, {cert_and_pkey, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667449}}]}| {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLRqVU6TF0w4j56YEsiio7JKXdj2\nxytV+84pRrHIxtwy5wbd7M8d61WQqjR5dKc8onmDdDg57RBIgnZze7tOk53eX3Cn\n5/34jyitKxDkVEjMzHsSiBdZjTDegLqmKpxOAiokPEsHxn4XenZyacOM/gcJ6/j+\nfQIDAQABowIwADALBgkqhkiG9w0BAQUDggEBAJDQBZGy6r2NS8CERBUZ5W7ks0uh\npNwXgErwxU3srGRDoLKyN4d2VFo/xoQfmRFryX1M+aVwtfzbnTr0Z6AlRV19I2Zx\n8OCR6Zmk0gHdHbCAoDO8Yu78lbl+yv13SrjzrUpNXG67Zx6thc8Ea7KvCezSSsVM\ngJuJE2Chr0sADmqCfWPeJq/mujZKqDKKl06Myr++6XQ0oOekSnPfyCREcSSoHe9r\nX+ORwhpHb31vtNZc7cRRmfqBUKDmp8QDNj+539k03qyjxVhCGJQcOsPCFjWQBMJD\noyh2dK6J6E43Or43v2sEnMBtMcLl0UEymNnyxDCL2+tjlDn92nMM2fngk4Q=\n-----END CERTIFICATE-----\n">>, <<"*****">>}]}, {buckets,[{configs,[]}]}, {autocompaction, [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}]}, {auto_failover_cfg, [{'_vclock',[{'ns_1@127.0.0.1',{1,63575667445}}]}, {enabled,false}, {timeout,120}, {max_nodes,1}, {count,0}]}, {alert_limits,[{max_overhead_perc,50},{max_disk_used,90}]}, {dynamic_config_version, [{'_vclock',[{'ns_1@127.0.0.1',{5,63575667447}}]},2,5]}] [error_logger:info,2014-08-19T16:47:56.330,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17898.0>}, {name,ns_config}, {mfargs, {ns_config,start_link, ["/opt/couchbase/etc/couchbase/config", ns_config_default]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.330,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17900.0>}, {name,ns_config_remote}, {mfargs, {ns_config_replica,start_link, [{local,ns_config_remote}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.330,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17901.0>}, {name,ns_config_log}, {mfargs,{ns_config_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.330,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_config_sup} started: [{pid,<0.17903.0>}, {name,cb_config_couch_sync}, {mfargs,{cb_config_couch_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, 
{child_type,worker}] [error_logger:info,2014-08-19T16:47:56.330,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17895.0>}, {name,ns_config_sup}, {mfargs,{ns_config_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:47:56.331,ns_1@127.0.0.1:ns_server_sup<0.17906.0>:dir_size:start_link:47]Starting quick version of dir_size with program name: i386-linux-godu [error_logger:info,2014-08-19T16:47:56.331,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17905.0>}, {name,vbucket_filter_changes_registry}, {mfargs, {ns_process_registry,start_link, [vbucket_filter_changes_registry]}}, {restart_type,permanent}, {shutdown,100}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.331,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17907.0>}, {name,diag_handler_worker}, {mfa,{work_queue,start_link,[diag_handler_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.331,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17908.0>}, {name,dir_size}, {mfa,{dir_size,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.331,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17909.0>}, {name,request_throttler}, {mfa,{request_throttler,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.332,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17910.0>}, {name,ns_log}, {mfa,{ns_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.332,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17911.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.332,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [error_logger:info,2014-08-19T16:47:56.332,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17912.0>}, {name,ns_config_ets_dup}, {mfa,{ns_config_ets_dup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[ns_server:debug,2014-08-19T16:47:56.332,ns_1@127.0.0.1:ns_config_isasl_sync<0.17914.0>:ns_config_isasl_sync:init:63]isasl_sync init: ["/opt/couchbase/var/lib/couchbase/isasl.pw","_admin", "051984933ac39a02e4056d80a45e8c36"] [ns_server:debug,2014-08-19T16:47:56.332,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T16:47:56.332,ns_1@127.0.0.1:ns_config_isasl_sync<0.17914.0>:ns_config_isasl_sync:init:71]isasl_sync init buckets: [] [ns_server:debug,2014-08-19T16:47:56.332,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:47:56.332,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_isasl_sync<0.17914.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLR"...>>, <<"*****">>} [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [ns_server:debug,2014-08-19T16:47:56.333,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] 
[ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: replication_topology -> star [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:info,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, {replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T16:47:56.334,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: uuid -> <<"9032e293d656a8b04683554c561fe06f">> [ns_server:debug,2014-08-19T16:47:56.336,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T16:47:56.336,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T16:47:56.336,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [ns_server:debug,2014-08-19T16:47:56.336,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T16:47:56.336,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T16:47:56.336,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:47:56.336,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:47:56.337,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:47:56.337,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:47:56.337,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, 
"vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_node_disco<0.17920.0>:ns_node_disco:init:103]Initting ns_node_disco with [] [error_logger:info,2014-08-19T16:47:56.338,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17914.0>}, {name,ns_config_isasl_sync}, {mfa,{ns_config_isasl_sync,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [error_logger:info,2014-08-19T16:47:56.338,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17917.0>}, {name,ns_log_events}, {mfa,{gen_event,start_link,[{local,ns_log_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", 
[admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:47:56.338,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [error_logger:info,2014-08-19T16:47:56.338,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17919.0>}, {name,ns_node_disco_events}, {mfargs, {gen_event,start_link, [{local,ns_node_disco_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.339,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:47:56.339,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:47:56.339,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:47:56.339,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:47:56.367,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:47:56.367,ns_1@127.0.0.1:<0.17921.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:47:56.367,ns_1@127.0.0.1:<0.17921.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T16:47:56.367,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17920.0>}, {name,ns_node_disco}, {mfargs,{ns_node_disco,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.367,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:init:66]init pulling [ns_server:debug,2014-08-19T16:47:56.367,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:init:68]init pushing [error_logger:info,2014-08-19T16:47:56.367,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17923.0>}, {name,ns_node_disco_log}, {mfargs,{ns_node_disco_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.368,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17924.0>}, {name,ns_node_disco_conf_events}, {mfargs,{ns_node_disco_conf_events,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T16:47:56.368,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17925.0>}, {name,ns_config_rep_merger}, {mfargs,{ns_config_rep,start_link_merger,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.369,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:init:72]init reannouncing [ns_server:debug,2014-08-19T16:47:56.369,ns_1@127.0.0.1:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:47:56.369,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: alert_limits -> [{max_overhead_perc,50},{max_disk_used,90}] [ns_server:debug,2014-08-19T16:47:56.369,ns_1@127.0.0.1:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T16:47:56.369,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: auto_failover_cfg -> [{enabled,false},{timeout,120},{max_nodes,1},{count,0}] [ns_server:debug,2014-08-19T16:47:56.369,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [error_logger:info,2014-08-19T16:47:56.369,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_node_disco_sup} started: [{pid,<0.17926.0>}, {name,ns_config_rep}, {mfargs,{ns_config_rep,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: autocompaction -> [{database_fragmentation_threshold,{30,undefined}}, {view_fragmentation_threshold,{30,undefined}}] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:47:56.369,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([alert_limits,auto_failover_cfg,autocompaction, buckets,cert_and_pkey,cluster_compat_version, drop_request_memory_threshold_mib, dynamic_config_version,email_alerts, fast_warmup,index_aware_rebalance_disabled, max_bucket_count,memory_quota,nodes_wanted,otp, remote_clusters,replication, replication_topology,rest,rest_creds, server_groups,set_view_update_daemon,uuid, vbucket_map_history, {couchdb,max_parallel_indexers}, {couchdb,max_parallel_replica_indexers}, {request_limit,capi}, {request_limit,rest}, {node,'ns_1@127.0.0.1',capi_port}, {node,'ns_1@127.0.0.1',compaction_daemon}, {node,'ns_1@127.0.0.1',config_version}, {node,'ns_1@127.0.0.1',isasl}, {node,'ns_1@127.0.0.1',membership}, {node,'ns_1@127.0.0.1',memcached}, {node,'ns_1@127.0.0.1',moxi}, {node,'ns_1@127.0.0.1',ns_log}, {node,'ns_1@127.0.0.1',port_servers}, {node,'ns_1@127.0.0.1',rest}, {node,'ns_1@127.0.0.1',ssl_capi_port}, {node,'ns_1@127.0.0.1', ssl_proxy_downstream_port}, {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port}, {node,'ns_1@127.0.0.1',ssl_rest_port}]..) 
[error_logger:info,2014-08-19T16:47:56.370,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17918.0>}, {name,ns_node_disco_sup}, {mfa,{ns_node_disco_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[]}] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_log_events<0.17917.0>:ns_mail_log:init:44]ns_mail_log started up [error_logger:info,2014-08-19T16:47:56.370,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17929.0>}, {name,vbucket_map_mirror}, {mfa,{vbucket_map_mirror,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: cert_and_pkey -> {<<"-----BEGIN CERTIFICATE-----\nMIICmDCCAYKgAwIBAgIIE4vQOGMt4U8wCwYJKoZIhvcNAQEFMAwxCjAIBgNVBAMT\nASowHhcNMTMwMTAxMDAwMDAwWhcNNDkxMjMxMjM1OTU5WjAMMQowCAYDVQQDEwEq\nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA5WgTuSJMU8qPdc8uDdst\nav13oFxDpbqz8mIk7TVReVHwO9MvKgi8cqlGev50BaQNfzFW41E/baDmpa8sAlSe\nzPoGcRD5wDJdHRH87FdW8eeE4rA8N9TcsSyJDo0gmWO+Vj+ow5dzF87001UstU6A\n5UQ5anT0dGnKLChpmk0KiKx28+XSnycDQ8osiLR"...>>, <<"*****">>} [error_logger:info,2014-08-19T16:47:56.370,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17933.0>}, {name,bucket_info_cache}, {mfa,{bucket_info_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: cluster_compat_version -> [2,5] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17944.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: drop_request_memory_threshold_mib -> undefined [error_logger:info,2014-08-19T16:47:56.370,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17936.0>}, {name,ns_tick_event}, {mfa,{gen_event,start_link,[{local,ns_tick_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.370,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:info,2014-08-19T16:47:56.371,ns_1@127.0.0.1:remote_clusters_info<0.17949.0>:remote_clusters_info:read_or_create_table:540]Reading remote_clusters_info content from /opt/couchbase/var/lib/couchbase/remote_clusters_cache_v3 [error_logger:info,2014-08-19T16:47:56.371,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS 
REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17937.0>}, {name,buckets_events}, {mfa,{gen_event,start_link,[{local,buckets_events}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: email_alerts -> [{recipients,["root@localhost"]}, {sender,"couchbase@localhost"}, {enabled,false}, {email_server,[{user,[]}, {pass,"*****"}, {host,"localhost"}, {port,25}, {encrypt,false}]}, {alerts,[auto_failover_node,auto_failover_maximum_reached, auto_failover_other_nodes_down,auto_failover_cluster_too_small,ip, disk,overhead,ep_oom_errors,ep_item_commit_failed]}] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: fast_warmup -> [{fast_warmup_enabled,true}, {min_memory_threshold,10}, {min_items_threshold,10}] [error_logger:info,2014-08-19T16:47:56.371,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_mail_sup} started: [{pid,<0.17939.0>}, {name,ns_mail_log}, {mfargs,{ns_mail_log,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17944.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}, {proc_lib,init_p_do_apply,3}]} [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: index_aware_rebalance_disabled -> false [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: max_bucket_count -> 10 [error_logger:info,2014-08-19T16:47:56.371,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17938.0>}, {name,ns_mail_sup}, {mfa,{ns_mail_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: memory_quota -> 58026 [error_logger:info,2014-08-19T16:47:56.371,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17940.0>}, {name,ns_stats_event}, {mfa,{gen_event,start_link,[{local,ns_stats_event}]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@127.0.0.1'] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_server_sup<0.17906.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] 
[error_logger:info,2014-08-19T16:47:56.371,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17941.0>}, {name,samples_loader_tasks}, {mfa,{samples_loader_tasks,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.371,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: remote_clusters -> [] [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_server_sup<0.17906.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: replication -> [{enabled,true}] [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_server_sup<0.17906.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [error_logger:info,2014-08-19T16:47:56.372,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17942.0>}, {name,ns_heart}, {mfa,{ns_heart,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: replication_topology -> star [user:info,2014-08-19T16:47:56.372,ns_1@127.0.0.1:mb_master<0.17956.0>:mb_master:init:86]I'm the only node, so I'm the master. [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [error_logger:info,2014-08-19T16:47:56.372,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17945.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [error_logger:info,2014-08-19T16:47:56.372,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17949.0>}, {name,remote_clusters_info}, {mfa,{remote_clusters_info,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] [error_logger:info,2014-08-19T16:47:56.372,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17954.0>}, {name,master_activity_events}, {mfa, {gen_event,start_link, [{local,master_activity_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: set_view_update_daemon -> [{update_interval,5000}, {update_min_changes,5000}, 
{replica_update_min_changes,5000}] [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: uuid -> <<"9032e293d656a8b04683554c561fe06f">> [ns_server:debug,2014-08-19T16:47:56.372,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17944.0>:ns_heart:current_status_slow:248]Ignoring failure to grab system stats: {'EXIT',{noproc,{gen_server,call, [{'stats_reader-@system','ns_1@127.0.0.1'}, {latest,"minute"}]}}} [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [] [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_indexers} -> 4 [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:mb_master_sup<0.17958.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.17959.0> on 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17944.0>:ns_heart:grab_local_xdcr_replications:438]Ignoring exception getting xdcr replication infos {exit,{noproc,{gen_server,call,[xdc_replication_sup,which_children,infinity]}}, [{gen_server,call,3}, {xdc_replication_sup,all_local_replication_infos,0}, {ns_heart,grab_local_xdcr_replications,0}, {ns_heart,current_status_slow,0}, {ns_heart,slow_updater_loop,1}]} [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {couchdb,max_parallel_replica_indexers} -> 2 [error_logger:info,2014-08-19T16:47:56.373,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17959.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {request_limit,capi} -> undefined [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {request_limit,rest} -> undefined [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:mb_master_sup<0.17958.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.17964.0> on 'ns_1@127.0.0.1' [ns_server:debug,2014-08-19T16:47:56.373,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [error_logger:info,2014-08-19T16:47:56.373,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17964.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.374,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: 
{node,'ns_1@127.0.0.1',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:47:56.374,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:47:56.374,ns_1@127.0.0.1:<0.17967.0>:auto_failover:init:134]init auto_failover. [ns_server:debug,2014-08-19T16:47:56.374,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',membership} -> active [ns_server:debug,2014-08-19T16:47:56.374,ns_1@127.0.0.1:mb_master_sup<0.17958.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.17967.0> on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:47:56.374,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.17967.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.374,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [error_logger:info,2014-08-19T16:47:56.374,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17956.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:47:56.374,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:47:56.375,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [error_logger:info,2014-08-19T16:47:56.375,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17968.0>}, {name,master_activity_events_ingress}, {mfa, {gen_event,start_link, [{local,master_activity_events_ingress}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.375,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17969.0>}, {name,master_activity_events_timestamper}, {mfa, {master_activity_events,start_link_timestamper,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[error_logger:info,2014-08-19T16:47:56.375,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17970.0>}, {name,master_activity_events_pids_watcher}, {mfa, {master_activity_events_pids_watcher,start_link, []}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.375,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:47:56.376,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:47:56.376,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:47:56.376,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:47:56.376,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:47:56.376,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@127.0.0.1',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:47:56.376,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [error_logger:info,2014-08-19T16:47:56.388,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17971.0>}, {name,master_activity_events_keeper}, {mfa,{master_activity_events_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] 
[ns_server:debug,2014-08-19T16:47:56.410,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:47:56.410,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:47:56.410,ns_1@127.0.0.1:<0.17930.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:47:56.411,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:47:56.411,ns_1@127.0.0.1:<0.17930.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [error_logger:info,2014-08-19T16:47:56.427,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17975.0>}, {name,ns_ssl_services_setup}, {mfargs,{ns_ssl_services_setup,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.429,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17977.0>}, {name,ns_rest_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_rest_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.431,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_ssl_services_sup} started: [{pid,<0.17994.0>}, {name,ns_capi_ssl_service}, {mfargs, {ns_ssl_services_setup,start_link_capi_service,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.431,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.17974.0>}, {name,ns_ssl_services_sup}, {mfargs,{ns_ssl_services_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.431,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.18011.0>}, {name,menelaus_ui_auth}, {mfargs,{menelaus_ui_auth,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.431,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.18012.0>}, {name,menelaus_web_cache}, {mfargs,{menelaus_web_cache,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.431,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: 
{local,menelaus_sup} started: [{pid,<0.18013.0>}, {name,menelaus_stats_gatherer}, {mfargs,{menelaus_stats_gatherer,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.431,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.18014.0>}, {name,menelaus_web}, {mfargs,{menelaus_web,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.432,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.18031.0>}, {name,menelaus_event}, {mfargs,{menelaus_event,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.432,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.18032.0>}, {name,hot_keys_keeper}, {mfargs,{hot_keys_keeper,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.432,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,menelaus_sup} started: [{pid,<0.18033.0>}, {name,menelaus_web_alerts_srv}, {mfargs,{menelaus_web_alerts_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,5000}, {child_type,worker}] [user:info,2014-08-19T16:47:56.432,ns_1@127.0.0.1:ns_server_sup<0.17906.0>:menelaus_sup:start_link:44]Couchbase Server has started on web port 8091 on node 'ns_1@127.0.0.1'. 
[error_logger:info,2014-08-19T16:47:56.432,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.17973.0>}, {name,menelaus}, {mfa,{menelaus_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.432,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.18035.0>}, {name,mc_couch_events}, {mfargs, {gen_event,start_link,[{local,mc_couch_events}]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:47:56.433,ns_1@127.0.0.1:<0.18037.0>:mc_tcp_listener:init:24]mccouch is listening on port 11213 [error_logger:info,2014-08-19T16:47:56.433,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.18036.0>}, {name,mc_conn_sup}, {mfargs,{mc_conn_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.433,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mc_sup} started: [{pid,<0.18037.0>}, {name,mc_tcp_listener}, {mfargs,{mc_tcp_listener,start_link,[11213]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.433,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18034.0>}, {name,mc_sup}, {mfa,{mc_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.433,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18038.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:47:56.433,ns_1@127.0.0.1:<0.18041.0>:ns_memcached_log_rotator:init:28]Starting log rotator on "/opt/couchbase/var/lib/couchbase/logs"/"memcached.log"* with an initial period of 39003ms [error_logger:info,2014-08-19T16:47:56.433,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18039.0>}, {name,ns_port_memcached_killer}, {mfa,{ns_ports_setup,start_memcached_force_killer,[]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18041.0>}, {name,ns_memcached_log_rotator}, {mfa,{ns_memcached_log_rotator,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] 
=========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18043.0>}, {name,memcached_clients_pool}, {mfa,{memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18044.0>}, {name,proxied_memcached_clients_pool}, {mfa,{proxied_memcached_clients_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18045.0>}, {name,xdc_lhttpc_pool}, {mfa, {lhttpc_manager,start_link, [[{name,xdc_lhttpc_pool}, {connection_timeout,120000}, {pool_size,200}]]}}, {restart_type,permanent}, {shutdown,10000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18046.0>}, {name,ns_null_connection_pool}, {mfa, {ns_null_connection_pool,start_link, [ns_null_connection_pool]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.434,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18047.0>}, {name,xdc_replication_sup}, {mfa,{xdc_replication_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18048.0>}, {name,xdc_rep_manager}, {mfa,{xdc_rep_manager,start_link,[]}}, {restart_type,permanent}, {shutdown,30000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18050.0>}, {name,ns_memcached_sockets_pool}, {mfa,{ns_memcached_sockets_pool,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.18053.0>}, {name,ns_bucket_worker}, {mfargs,{work_queue,start_link,[ns_bucket_worker]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.18055.0>}, {name,buckets_observing_subscription}, {mfargs,{ns_bucket_sup,subscribe_on_config_events,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] 
[error_logger:info,2014-08-19T16:47:56.435,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_worker_sup} started: [{pid,<0.18054.0>}, {name,ns_bucket_sup}, {mfargs,{ns_bucket_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.436,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18052.0>}, {name,ns_bucket_worker_sup}, {mfa,{ns_bucket_worker_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:47:56.436,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18056.0>}, {name,system_stats_collector}, {mfa,{system_stats_collector,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.436,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18059.0>}, {name,{stats_archiver,"@system"}}, {mfa,{stats_archiver,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.436,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18061.0>}, {name,{stats_reader,"@system"}}, {mfa,{stats_reader,start_link,["@system"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.436,ns_1@127.0.0.1:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:444]No buckets to compact. Rescheduling compaction. [ns_server:debug,2014-08-19T16:47:56.437,ns_1@127.0.0.1:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [error_logger:info,2014-08-19T16:47:56.437,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18062.0>}, {name,compaction_daemon}, {mfa,{compaction_daemon,start_link,[]}}, {restart_type,{permanent,4}}, {shutdown,86400000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:47:56.437,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:init:76]Loaded the following docs: [] [ns_server:debug,2014-08-19T16:47:56.437,ns_1@127.0.0.1:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:47:56.437,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18064.0>}, {name,xdc_rdoc_replication_srv}, {mfa,{xdc_rdoc_replication_srv,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:47:56.437,ns_1@127.0.0.1:set_view_update_daemon<0.18066.0>:set_view_update_daemon:init:50]Set view update daemon, starting with the following settings: update interval: 5000ms minimum number of changes: 5000 [error_logger:info,2014-08-19T16:47:56.437,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.18066.0>}, {name,set_view_update_daemon}, {mfa,{set_view_update_daemon,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:47:56.437,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_cluster_sup} started: [{pid,<0.17906.0>}, {name,ns_server_sup}, {mfargs,{ns_server_sup,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [user:info,2014-08-19T16:47:56.438,ns_1@127.0.0.1:<0.17911.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: WARNING: curl error: transfer closed with outstanding read data remaining from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming WARNING: curl error: couldn't connect to host from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming ERROR: could not contact REST server(s): http://127.0.0.1:8091/pools/default/saslBucketsStreaming WARNING: curl error: couldn't connect to host from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming ERROR: could not contact REST server(s): http://127.0.0.1:8091/pools/default/saslBucketsStreaming WARNING: curl error: couldn't connect to host from: http://127.0.0.1:8091/pools/default/saslBucketsStreaming EOL on stdin. 
Exiting [ns_server:debug,2014-08-19T16:47:56.451,ns_1@127.0.0.1:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:47:56.451,ns_1@127.0.0.1:<0.17932.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:47:56.451,ns_1@127.0.0.1:<0.17932.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@127.0.0.1'], with cookie: xyzevwdfypcplvpp [cluster:info,2014-08-19T16:47:56.479,ns_1@127.0.0.1:ns_cluster<0.17894.0>:ns_cluster:handle_call:171]Changing address to "127.0.0.1" due to client request [ns_server:debug,2014-08-19T16:47:56.488,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: memory_quota -> 90112 [ns_server:debug,2014-08-19T16:47:56.488,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([memory_quota]..) [ns_server:debug,2014-08-19T16:48:09.314,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: settings -> [{stats,[{send_stats,false}]}] [ns_server:debug,2014-08-19T16:48:09.314,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([settings]..) [user:info,2014-08-19T16:48:15.572,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_unused_buckets_db_files:492]Deleting old data files of bucket "tiles" [user:info,2014-08-19T16:48:15.572,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_unused_buckets_db_files:492]Deleting old data files of bucket "default" [ns_server:info,2014-08-19T16:48:15.580,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/master">>: ok [ns_server:info,2014-08-19T16:48:15.584,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/99">>: ok [ns_server:info,2014-08-19T16:48:15.586,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/98">>: ok [ns_server:info,2014-08-19T16:48:15.590,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/97">>: ok [ns_server:info,2014-08-19T16:48:15.593,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/96">>: ok [ns_server:info,2014-08-19T16:48:15.597,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/95">>: ok [ns_server:info,2014-08-19T16:48:15.600,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/94">>: ok [ns_server:info,2014-08-19T16:48:15.604,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/93">>: ok [ns_server:info,2014-08-19T16:48:15.607,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/92">>: ok [ns_server:info,2014-08-19T16:48:15.610,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/91">>: ok [ns_server:info,2014-08-19T16:48:15.613,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/90">>: ok [ns_server:info,2014-08-19T16:48:15.616,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/9">>: ok 
[ns_server:info,2014-08-19T16:48:15.619,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/89">>: ok [ns_server:info,2014-08-19T16:48:15.622,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/88">>: ok [ns_server:info,2014-08-19T16:48:15.625,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/87">>: ok [ns_server:info,2014-08-19T16:48:15.628,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/86">>: ok [ns_server:info,2014-08-19T16:48:15.631,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/852">>: ok [ns_server:info,2014-08-19T16:48:15.635,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/851">>: ok [ns_server:info,2014-08-19T16:48:15.639,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/850">>: ok [ns_server:info,2014-08-19T16:48:15.642,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/85">>: ok [ns_server:info,2014-08-19T16:48:15.645,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/849">>: ok [ns_server:info,2014-08-19T16:48:15.648,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/848">>: ok [ns_server:info,2014-08-19T16:48:15.651,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/847">>: ok [ns_server:info,2014-08-19T16:48:15.654,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/846">>: ok [ns_server:info,2014-08-19T16:48:15.656,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/845">>: ok [ns_server:info,2014-08-19T16:48:15.659,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/844">>: ok [ns_server:info,2014-08-19T16:48:15.662,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/843">>: ok [ns_server:info,2014-08-19T16:48:15.665,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/842">>: ok [ns_server:info,2014-08-19T16:48:15.669,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/841">>: ok [ns_server:info,2014-08-19T16:48:15.672,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/840">>: ok [ns_server:info,2014-08-19T16:48:15.674,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/84">>: ok [ns_server:info,2014-08-19T16:48:15.678,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/839">>: ok [ns_server:info,2014-08-19T16:48:15.681,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/838">>: ok [ns_server:info,2014-08-19T16:48:15.684,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/837">>: ok [ns_server:info,2014-08-19T16:48:15.687,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/836">>: ok [ns_server:info,2014-08-19T16:48:15.690,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/835">>: ok 
[ns_server:info,2014-08-19T16:48:15.693,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/834">>: ok [ns_server:info,2014-08-19T16:48:15.696,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/833">>: ok [ns_server:info,2014-08-19T16:48:15.699,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/832">>: ok [ns_server:info,2014-08-19T16:48:15.702,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/831">>: ok [ns_server:info,2014-08-19T16:48:15.706,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/830">>: ok [ns_server:info,2014-08-19T16:48:15.709,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/83">>: ok [ns_server:info,2014-08-19T16:48:15.712,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/829">>: ok [ns_server:info,2014-08-19T16:48:15.714,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/828">>: ok [ns_server:info,2014-08-19T16:48:15.717,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/827">>: ok [ns_server:info,2014-08-19T16:48:15.721,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/826">>: ok [ns_server:info,2014-08-19T16:48:15.724,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/825">>: ok [ns_server:info,2014-08-19T16:48:15.727,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/824">>: ok [ns_server:info,2014-08-19T16:48:15.731,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/823">>: ok [ns_server:info,2014-08-19T16:48:15.734,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/822">>: ok [ns_server:info,2014-08-19T16:48:15.737,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/821">>: ok [ns_server:info,2014-08-19T16:48:15.739,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/820">>: ok [ns_server:info,2014-08-19T16:48:15.742,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/82">>: ok [ns_server:info,2014-08-19T16:48:15.746,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/819">>: ok [ns_server:info,2014-08-19T16:48:15.749,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/818">>: ok [ns_server:info,2014-08-19T16:48:15.752,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/817">>: ok [ns_server:info,2014-08-19T16:48:15.755,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/816">>: ok [ns_server:info,2014-08-19T16:48:15.758,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/815">>: ok [ns_server:info,2014-08-19T16:48:15.760,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/814">>: ok [ns_server:info,2014-08-19T16:48:15.764,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/813">>: ok 
[ns_server:info,2014-08-19T16:48:15.767,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/812">>: ok [ns_server:info,2014-08-19T16:48:15.771,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/811">>: ok [ns_server:info,2014-08-19T16:48:15.773,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/810">>: ok [ns_server:info,2014-08-19T16:48:15.777,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/81">>: ok [ns_server:info,2014-08-19T16:48:15.779,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/809">>: ok [ns_server:info,2014-08-19T16:48:15.783,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/808">>: ok [ns_server:info,2014-08-19T16:48:15.786,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/807">>: ok [ns_server:info,2014-08-19T16:48:15.789,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/806">>: ok [ns_server:info,2014-08-19T16:48:15.792,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/805">>: ok [ns_server:info,2014-08-19T16:48:15.795,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/804">>: ok [ns_server:info,2014-08-19T16:48:15.798,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/803">>: ok [ns_server:info,2014-08-19T16:48:15.801,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/802">>: ok [ns_server:info,2014-08-19T16:48:15.804,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/801">>: ok [ns_server:info,2014-08-19T16:48:15.807,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/800">>: ok [ns_server:info,2014-08-19T16:48:15.810,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/80">>: ok [ns_server:info,2014-08-19T16:48:15.813,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/8">>: ok [ns_server:info,2014-08-19T16:48:15.816,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/799">>: ok [ns_server:info,2014-08-19T16:48:15.819,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/798">>: ok [ns_server:info,2014-08-19T16:48:15.822,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/797">>: ok [ns_server:info,2014-08-19T16:48:15.825,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/796">>: ok [ns_server:info,2014-08-19T16:48:15.828,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/795">>: ok [ns_server:info,2014-08-19T16:48:15.831,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/794">>: ok [ns_server:info,2014-08-19T16:48:15.834,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/793">>: ok [ns_server:info,2014-08-19T16:48:15.837,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/792">>: ok 
[ns_server:info,2014-08-19T16:48:15.839,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/791">>: ok [ns_server:info,2014-08-19T16:48:15.842,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/790">>: ok [ns_server:info,2014-08-19T16:48:15.845,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/79">>: ok [ns_server:info,2014-08-19T16:48:15.848,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/789">>: ok [ns_server:info,2014-08-19T16:48:15.851,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/788">>: ok [ns_server:info,2014-08-19T16:48:15.854,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/787">>: ok [ns_server:info,2014-08-19T16:48:15.857,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/786">>: ok [ns_server:info,2014-08-19T16:48:15.860,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/785">>: ok [ns_server:info,2014-08-19T16:48:15.862,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/784">>: ok [ns_server:info,2014-08-19T16:48:15.865,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/783">>: ok [ns_server:info,2014-08-19T16:48:15.868,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/782">>: ok [ns_server:info,2014-08-19T16:48:15.871,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/781">>: ok [ns_server:info,2014-08-19T16:48:15.874,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/780">>: ok [ns_server:info,2014-08-19T16:48:15.877,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/78">>: ok [ns_server:info,2014-08-19T16:48:15.879,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/779">>: ok [ns_server:info,2014-08-19T16:48:15.882,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/778">>: ok [ns_server:info,2014-08-19T16:48:15.886,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/777">>: ok [ns_server:info,2014-08-19T16:48:15.889,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/776">>: ok [ns_server:info,2014-08-19T16:48:15.891,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/775">>: ok [ns_server:info,2014-08-19T16:48:15.894,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/774">>: ok [ns_server:info,2014-08-19T16:48:15.897,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/773">>: ok [ns_server:info,2014-08-19T16:48:15.899,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/772">>: ok [ns_server:info,2014-08-19T16:48:15.902,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/771">>: ok [ns_server:info,2014-08-19T16:48:15.905,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/770">>: ok 
[ns_server:info,2014-08-19T16:48:15.908,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/77">>: ok [ns_server:info,2014-08-19T16:48:15.910,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/769">>: ok [ns_server:info,2014-08-19T16:48:15.913,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/768">>: ok [ns_server:info,2014-08-19T16:48:15.916,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/76">>: ok [ns_server:info,2014-08-19T16:48:15.918,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/75">>: ok [ns_server:info,2014-08-19T16:48:15.921,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/74">>: ok [ns_server:info,2014-08-19T16:48:15.923,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/73">>: ok [ns_server:info,2014-08-19T16:48:15.926,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/72">>: ok [ns_server:info,2014-08-19T16:48:15.929,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/71">>: ok [ns_server:info,2014-08-19T16:48:15.931,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/70">>: ok [ns_server:info,2014-08-19T16:48:15.934,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/7">>: ok [ns_server:info,2014-08-19T16:48:15.936,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/69">>: ok [ns_server:info,2014-08-19T16:48:15.939,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/68">>: ok [ns_server:info,2014-08-19T16:48:15.941,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/67">>: ok [ns_server:info,2014-08-19T16:48:15.944,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/66">>: ok [ns_server:info,2014-08-19T16:48:15.947,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/65">>: ok [ns_server:info,2014-08-19T16:48:15.949,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/64">>: ok [ns_server:info,2014-08-19T16:48:15.952,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/63">>: ok [ns_server:info,2014-08-19T16:48:15.954,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/62">>: ok [ns_server:info,2014-08-19T16:48:15.957,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/61">>: ok [ns_server:info,2014-08-19T16:48:15.959,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/60">>: ok [ns_server:info,2014-08-19T16:48:15.962,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/6">>: ok [ns_server:info,2014-08-19T16:48:15.964,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/596">>: ok [ns_server:info,2014-08-19T16:48:15.967,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/595">>: ok 
[ns_server:info,2014-08-19T16:48:15.970,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/594">>: ok [ns_server:info,2014-08-19T16:48:15.972,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/593">>: ok [ns_server:info,2014-08-19T16:48:15.975,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/592">>: ok [ns_server:info,2014-08-19T16:48:15.977,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/591">>: ok [ns_server:info,2014-08-19T16:48:15.979,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/590">>: ok [ns_server:info,2014-08-19T16:48:15.982,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/59">>: ok [ns_server:info,2014-08-19T16:48:15.985,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/589">>: ok [ns_server:info,2014-08-19T16:48:15.987,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/588">>: ok [ns_server:info,2014-08-19T16:48:15.990,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/587">>: ok [ns_server:info,2014-08-19T16:48:15.992,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/586">>: ok [ns_server:info,2014-08-19T16:48:15.995,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/585">>: ok [ns_server:info,2014-08-19T16:48:15.997,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/584">>: ok [ns_server:info,2014-08-19T16:48:16.000,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/583">>: ok [ns_server:info,2014-08-19T16:48:16.002,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/582">>: ok [ns_server:info,2014-08-19T16:48:16.005,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/581">>: ok [ns_server:info,2014-08-19T16:48:16.007,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/580">>: ok [ns_server:info,2014-08-19T16:48:16.009,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/58">>: ok [ns_server:info,2014-08-19T16:48:16.012,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/579">>: ok [ns_server:info,2014-08-19T16:48:16.014,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/578">>: ok [ns_server:info,2014-08-19T16:48:16.017,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/577">>: ok [ns_server:info,2014-08-19T16:48:16.019,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/576">>: ok [ns_server:info,2014-08-19T16:48:16.021,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/575">>: ok [ns_server:info,2014-08-19T16:48:16.024,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/574">>: ok [ns_server:info,2014-08-19T16:48:16.026,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/573">>: ok 
[ns_server:info,2014-08-19T16:48:16.028,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/572">>: ok [ns_server:info,2014-08-19T16:48:16.031,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/571">>: ok [ns_server:info,2014-08-19T16:48:16.033,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/570">>: ok [ns_server:info,2014-08-19T16:48:16.036,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/57">>: ok [ns_server:info,2014-08-19T16:48:16.038,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/569">>: ok [ns_server:info,2014-08-19T16:48:16.041,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/568">>: ok [ns_server:info,2014-08-19T16:48:16.043,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/567">>: ok [ns_server:info,2014-08-19T16:48:16.045,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/566">>: ok [ns_server:info,2014-08-19T16:48:16.047,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/565">>: ok [ns_server:info,2014-08-19T16:48:16.050,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/564">>: ok [ns_server:info,2014-08-19T16:48:16.052,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/563">>: ok [ns_server:info,2014-08-19T16:48:16.055,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/562">>: ok [ns_server:info,2014-08-19T16:48:16.057,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/561">>: ok [ns_server:info,2014-08-19T16:48:16.059,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/560">>: ok [ns_server:info,2014-08-19T16:48:16.062,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/56">>: ok [ns_server:info,2014-08-19T16:48:16.064,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/559">>: ok [ns_server:info,2014-08-19T16:48:16.066,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/558">>: ok [ns_server:info,2014-08-19T16:48:16.068,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/557">>: ok [ns_server:info,2014-08-19T16:48:16.071,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/556">>: ok [ns_server:info,2014-08-19T16:48:16.073,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/555">>: ok [ns_server:info,2014-08-19T16:48:16.076,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/554">>: ok [ns_server:info,2014-08-19T16:48:16.078,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/553">>: ok [ns_server:info,2014-08-19T16:48:16.080,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/552">>: ok [ns_server:info,2014-08-19T16:48:16.083,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/551">>: ok 
[ns_server:info,2014-08-19T16:48:16.085,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/550">>: ok [ns_server:info,2014-08-19T16:48:16.088,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/55">>: ok [ns_server:info,2014-08-19T16:48:16.089,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/549">>: ok [ns_server:info,2014-08-19T16:48:16.091,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/548">>: ok [ns_server:info,2014-08-19T16:48:16.093,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/547">>: ok [ns_server:info,2014-08-19T16:48:16.095,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/546">>: ok [ns_server:info,2014-08-19T16:48:16.098,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/545">>: ok [ns_server:info,2014-08-19T16:48:16.101,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/544">>: ok [ns_server:info,2014-08-19T16:48:16.103,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/543">>: ok [ns_server:info,2014-08-19T16:48:16.105,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/542">>: ok [ns_server:info,2014-08-19T16:48:16.107,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/541">>: ok [ns_server:info,2014-08-19T16:48:16.110,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/540">>: ok [ns_server:info,2014-08-19T16:48:16.112,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/54">>: ok [ns_server:info,2014-08-19T16:48:16.114,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/539">>: ok [ns_server:info,2014-08-19T16:48:16.116,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/538">>: ok [ns_server:info,2014-08-19T16:48:16.118,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/537">>: ok [ns_server:info,2014-08-19T16:48:16.121,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/536">>: ok [ns_server:info,2014-08-19T16:48:16.123,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/535">>: ok [ns_server:info,2014-08-19T16:48:16.125,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/534">>: ok [ns_server:info,2014-08-19T16:48:16.127,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/533">>: ok [ns_server:info,2014-08-19T16:48:16.129,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/532">>: ok [ns_server:info,2014-08-19T16:48:16.131,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/531">>: ok [ns_server:info,2014-08-19T16:48:16.134,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/530">>: ok [ns_server:info,2014-08-19T16:48:16.136,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/53">>: ok 
[ns_server:info,2014-08-19T16:48:16.138,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/529">>: ok [ns_server:info,2014-08-19T16:48:16.140,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/528">>: ok [ns_server:info,2014-08-19T16:48:16.142,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/527">>: ok [ns_server:info,2014-08-19T16:48:16.144,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/526">>: ok [ns_server:info,2014-08-19T16:48:16.146,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/525">>: ok [ns_server:info,2014-08-19T16:48:16.149,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/524">>: ok [ns_server:info,2014-08-19T16:48:16.151,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/523">>: ok [ns_server:info,2014-08-19T16:48:16.153,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/522">>: ok [ns_server:info,2014-08-19T16:48:16.155,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/521">>: ok [ns_server:info,2014-08-19T16:48:16.157,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/520">>: ok [ns_server:info,2014-08-19T16:48:16.160,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/52">>: ok [ns_server:info,2014-08-19T16:48:16.162,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/519">>: ok [ns_server:info,2014-08-19T16:48:16.164,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/518">>: ok [ns_server:info,2014-08-19T16:48:16.166,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/517">>: ok [ns_server:info,2014-08-19T16:48:16.168,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/516">>: ok [ns_server:info,2014-08-19T16:48:16.170,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/515">>: ok [ns_server:info,2014-08-19T16:48:16.172,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/514">>: ok [ns_server:info,2014-08-19T16:48:16.174,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/513">>: ok [ns_server:info,2014-08-19T16:48:16.176,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/512">>: ok [ns_server:info,2014-08-19T16:48:16.179,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/51">>: ok [ns_server:info,2014-08-19T16:48:16.181,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/50">>: ok [ns_server:info,2014-08-19T16:48:16.183,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/5">>: ok [ns_server:info,2014-08-19T16:48:16.185,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/49">>: ok [ns_server:info,2014-08-19T16:48:16.187,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/48">>: ok 
[ns_server:info,2014-08-19T16:48:16.189,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/47">>: ok [ns_server:info,2014-08-19T16:48:16.190,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/46">>: ok [ns_server:info,2014-08-19T16:48:16.193,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/45">>: ok [ns_server:info,2014-08-19T16:48:16.195,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/44">>: ok [ns_server:info,2014-08-19T16:48:16.197,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/43">>: ok [ns_server:info,2014-08-19T16:48:16.198,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/42">>: ok [ns_server:info,2014-08-19T16:48:16.201,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/41">>: ok [ns_server:info,2014-08-19T16:48:16.202,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/40">>: ok [ns_server:info,2014-08-19T16:48:16.204,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/4">>: ok [ns_server:info,2014-08-19T16:48:16.206,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/39">>: ok [ns_server:info,2014-08-19T16:48:16.208,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/38">>: ok [ns_server:info,2014-08-19T16:48:16.210,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/37">>: ok [ns_server:info,2014-08-19T16:48:16.212,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/36">>: ok [ns_server:info,2014-08-19T16:48:16.214,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/35">>: ok [ns_server:info,2014-08-19T16:48:16.216,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/341">>: ok [ns_server:info,2014-08-19T16:48:16.218,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/340">>: ok [ns_server:info,2014-08-19T16:48:16.220,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/34">>: ok [ns_server:info,2014-08-19T16:48:16.222,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/339">>: ok [ns_server:info,2014-08-19T16:48:16.224,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/338">>: ok [ns_server:info,2014-08-19T16:48:16.226,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/337">>: ok [ns_server:info,2014-08-19T16:48:16.228,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/336">>: ok [ns_server:info,2014-08-19T16:48:16.230,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/335">>: ok [ns_server:info,2014-08-19T16:48:16.232,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/334">>: ok [ns_server:info,2014-08-19T16:48:16.234,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/333">>: ok 
[ns_server:info,2014-08-19T16:48:16.235,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/332">>: ok [ns_server:info,2014-08-19T16:48:16.237,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/331">>: ok [ns_server:info,2014-08-19T16:48:16.239,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/330">>: ok [ns_server:info,2014-08-19T16:48:16.241,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/33">>: ok [ns_server:info,2014-08-19T16:48:16.243,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/329">>: ok [ns_server:info,2014-08-19T16:48:16.245,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/328">>: ok [ns_server:info,2014-08-19T16:48:16.247,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/327">>: ok [ns_server:info,2014-08-19T16:48:16.249,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/326">>: ok [ns_server:info,2014-08-19T16:48:16.251,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/325">>: ok [ns_server:info,2014-08-19T16:48:16.253,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/324">>: ok [ns_server:info,2014-08-19T16:48:16.255,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/323">>: ok [ns_server:info,2014-08-19T16:48:16.257,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/322">>: ok [ns_server:info,2014-08-19T16:48:16.258,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/321">>: ok [ns_server:info,2014-08-19T16:48:16.260,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/320">>: ok [ns_server:info,2014-08-19T16:48:16.262,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/32">>: ok [ns_server:info,2014-08-19T16:48:16.264,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/319">>: ok [ns_server:info,2014-08-19T16:48:16.266,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/318">>: ok [ns_server:info,2014-08-19T16:48:16.267,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/317">>: ok [ns_server:info,2014-08-19T16:48:16.269,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/316">>: ok [ns_server:info,2014-08-19T16:48:16.271,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/315">>: ok [ns_server:info,2014-08-19T16:48:16.273,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/314">>: ok [ns_server:info,2014-08-19T16:48:16.275,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/313">>: ok [ns_server:info,2014-08-19T16:48:16.276,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/312">>: ok [ns_server:info,2014-08-19T16:48:16.278,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/311">>: ok 
[ns_server:info,2014-08-19T16:48:16.280,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/310">>: ok [ns_server:info,2014-08-19T16:48:16.282,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/31">>: ok [ns_server:info,2014-08-19T16:48:16.284,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/309">>: ok [ns_server:info,2014-08-19T16:48:16.286,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/308">>: ok [ns_server:info,2014-08-19T16:48:16.287,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/307">>: ok [ns_server:info,2014-08-19T16:48:16.289,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/306">>: ok [ns_server:info,2014-08-19T16:48:16.291,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/305">>: ok [ns_server:info,2014-08-19T16:48:16.293,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/304">>: ok [ns_server:info,2014-08-19T16:48:16.294,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/303">>: ok [ns_server:info,2014-08-19T16:48:16.296,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/302">>: ok [ns_server:info,2014-08-19T16:48:16.298,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/301">>: ok [ns_server:info,2014-08-19T16:48:16.300,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/300">>: ok [ns_server:info,2014-08-19T16:48:16.301,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/30">>: ok [ns_server:info,2014-08-19T16:48:16.303,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/3">>: ok [ns_server:info,2014-08-19T16:48:16.305,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/299">>: ok [ns_server:info,2014-08-19T16:48:16.307,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/298">>: ok [ns_server:info,2014-08-19T16:48:16.308,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/297">>: ok [ns_server:info,2014-08-19T16:48:16.310,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/296">>: ok [ns_server:info,2014-08-19T16:48:16.312,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/295">>: ok [ns_server:info,2014-08-19T16:48:16.313,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/294">>: ok [ns_server:info,2014-08-19T16:48:16.315,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/293">>: ok [ns_server:info,2014-08-19T16:48:16.316,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/292">>: ok [ns_server:info,2014-08-19T16:48:16.318,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/291">>: ok [ns_server:info,2014-08-19T16:48:16.319,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/290">>: ok 
[ns_server:info,2014-08-19T16:48:16.321,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/29">>: ok [ns_server:info,2014-08-19T16:48:16.322,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/289">>: ok [ns_server:info,2014-08-19T16:48:16.324,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/288">>: ok [ns_server:info,2014-08-19T16:48:16.326,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/287">>: ok [ns_server:info,2014-08-19T16:48:16.327,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/286">>: ok [ns_server:info,2014-08-19T16:48:16.328,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/285">>: ok [ns_server:info,2014-08-19T16:48:16.329,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/284">>: ok [ns_server:info,2014-08-19T16:48:16.331,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/283">>: ok [ns_server:info,2014-08-19T16:48:16.332,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/282">>: ok [ns_server:info,2014-08-19T16:48:16.333,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/281">>: ok [ns_server:info,2014-08-19T16:48:16.334,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/280">>: ok [ns_server:info,2014-08-19T16:48:16.336,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/28">>: ok [ns_server:info,2014-08-19T16:48:16.337,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/279">>: ok [ns_server:info,2014-08-19T16:48:16.338,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/278">>: ok [ns_server:info,2014-08-19T16:48:16.339,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/277">>: ok [ns_server:info,2014-08-19T16:48:16.340,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/276">>: ok [ns_server:info,2014-08-19T16:48:16.342,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/275">>: ok [ns_server:info,2014-08-19T16:48:16.344,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/274">>: ok [ns_server:info,2014-08-19T16:48:16.346,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/273">>: ok [ns_server:info,2014-08-19T16:48:16.347,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/272">>: ok [ns_server:info,2014-08-19T16:48:16.349,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/271">>: ok [ns_server:info,2014-08-19T16:48:16.350,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/270">>: ok [ns_server:info,2014-08-19T16:48:16.352,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/27">>: ok [ns_server:info,2014-08-19T16:48:16.353,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/269">>: ok 
[ns_server:info,2014-08-19T16:48:16.355,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/268">>: ok [ns_server:info,2014-08-19T16:48:16.356,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/267">>: ok [ns_server:info,2014-08-19T16:48:16.358,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/266">>: ok [ns_server:info,2014-08-19T16:48:16.359,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/265">>: ok [ns_server:info,2014-08-19T16:48:16.361,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/264">>: ok [ns_server:info,2014-08-19T16:48:16.362,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/263">>: ok [ns_server:info,2014-08-19T16:48:16.364,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/262">>: ok [ns_server:info,2014-08-19T16:48:16.365,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/261">>: ok [ns_server:info,2014-08-19T16:48:16.367,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/260">>: ok [ns_server:info,2014-08-19T16:48:16.368,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/26">>: ok [ns_server:info,2014-08-19T16:48:16.370,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/259">>: ok [ns_server:info,2014-08-19T16:48:16.372,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/258">>: ok [ns_server:info,2014-08-19T16:48:16.373,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/257">>: ok [ns_server:info,2014-08-19T16:48:16.374,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/256">>: ok [ns_server:info,2014-08-19T16:48:16.376,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/255">>: ok [ns_server:info,2014-08-19T16:48:16.377,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/254">>: ok [ns_server:info,2014-08-19T16:48:16.379,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/253">>: ok [ns_server:info,2014-08-19T16:48:16.380,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/252">>: ok [ns_server:info,2014-08-19T16:48:16.381,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/251">>: ok [ns_server:info,2014-08-19T16:48:16.383,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/250">>: ok [ns_server:info,2014-08-19T16:48:16.384,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/25">>: ok [ns_server:info,2014-08-19T16:48:16.385,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/249">>: ok [ns_server:info,2014-08-19T16:48:16.387,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/248">>: ok [ns_server:info,2014-08-19T16:48:16.388,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/247">>: ok 
[ns_server:info,2014-08-19T16:48:16.390,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/246">>: ok [ns_server:info,2014-08-19T16:48:16.391,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/245">>: ok [ns_server:info,2014-08-19T16:48:16.393,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/244">>: ok [ns_server:info,2014-08-19T16:48:16.394,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/243">>: ok [ns_server:info,2014-08-19T16:48:16.395,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/242">>: ok [ns_server:info,2014-08-19T16:48:16.397,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/241">>: ok [ns_server:info,2014-08-19T16:48:16.398,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/240">>: ok [ns_server:info,2014-08-19T16:48:16.400,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/24">>: ok [ns_server:info,2014-08-19T16:48:16.401,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/239">>: ok [ns_server:info,2014-08-19T16:48:16.402,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/238">>: ok [ns_server:info,2014-08-19T16:48:16.404,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/237">>: ok [ns_server:info,2014-08-19T16:48:16.405,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/236">>: ok [ns_server:info,2014-08-19T16:48:16.407,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/235">>: ok [ns_server:info,2014-08-19T16:48:16.408,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/234">>: ok [ns_server:info,2014-08-19T16:48:16.409,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/233">>: ok [ns_server:info,2014-08-19T16:48:16.411,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/232">>: ok [ns_server:info,2014-08-19T16:48:16.412,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/231">>: ok [ns_server:info,2014-08-19T16:48:16.413,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/230">>: ok [ns_server:info,2014-08-19T16:48:16.414,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/23">>: ok [ns_server:info,2014-08-19T16:48:16.416,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/229">>: ok [ns_server:info,2014-08-19T16:48:16.417,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/228">>: ok [ns_server:info,2014-08-19T16:48:16.418,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/227">>: ok [ns_server:info,2014-08-19T16:48:16.420,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/226">>: ok [ns_server:info,2014-08-19T16:48:16.421,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/225">>: ok 
[ns_server:info,2014-08-19T16:48:16.423,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/224">>: ok [ns_server:info,2014-08-19T16:48:16.424,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/223">>: ok [ns_server:info,2014-08-19T16:48:16.425,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/222">>: ok [ns_server:info,2014-08-19T16:48:16.427,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/221">>: ok [ns_server:info,2014-08-19T16:48:16.428,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/220">>: ok [ns_server:info,2014-08-19T16:48:16.429,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/22">>: ok [ns_server:info,2014-08-19T16:48:16.431,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/219">>: ok [ns_server:info,2014-08-19T16:48:16.432,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/218">>: ok [ns_server:info,2014-08-19T16:48:16.433,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/217">>: ok [ns_server:info,2014-08-19T16:48:16.435,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/216">>: ok [ns_server:info,2014-08-19T16:48:16.436,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/215">>: ok [ns_server:info,2014-08-19T16:48:16.437,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/214">>: ok [ns_server:info,2014-08-19T16:48:16.438,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/213">>: ok [ns_server:info,2014-08-19T16:48:16.440,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/212">>: ok [ns_server:info,2014-08-19T16:48:16.441,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/211">>: ok [ns_server:info,2014-08-19T16:48:16.443,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/210">>: ok [ns_server:info,2014-08-19T16:48:16.444,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/21">>: ok [ns_server:info,2014-08-19T16:48:16.445,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/209">>: ok [ns_server:info,2014-08-19T16:48:16.447,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/208">>: ok [ns_server:info,2014-08-19T16:48:16.448,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/207">>: ok [ns_server:info,2014-08-19T16:48:16.449,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/206">>: ok [ns_server:info,2014-08-19T16:48:16.450,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/205">>: ok [ns_server:info,2014-08-19T16:48:16.451,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/204">>: ok [ns_server:info,2014-08-19T16:48:16.453,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/203">>: ok 
[ns_server:info,2014-08-19T16:48:16.454,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/202">>: ok [ns_server:info,2014-08-19T16:48:16.455,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/201">>: ok [ns_server:info,2014-08-19T16:48:16.456,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/200">>: ok [ns_server:info,2014-08-19T16:48:16.457,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/20">>: ok [ns_server:info,2014-08-19T16:48:16.458,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/2">>: ok [ns_server:info,2014-08-19T16:48:16.459,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/199">>: ok [ns_server:info,2014-08-19T16:48:16.461,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/198">>: ok [ns_server:info,2014-08-19T16:48:16.462,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/197">>: ok [ns_server:info,2014-08-19T16:48:16.463,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/196">>: ok [ns_server:info,2014-08-19T16:48:16.464,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/195">>: ok [ns_server:info,2014-08-19T16:48:16.465,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/194">>: ok [ns_server:info,2014-08-19T16:48:16.466,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/193">>: ok [ns_server:info,2014-08-19T16:48:16.467,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/192">>: ok [ns_server:info,2014-08-19T16:48:16.468,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/191">>: ok [ns_server:info,2014-08-19T16:48:16.469,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/190">>: ok [ns_server:info,2014-08-19T16:48:16.470,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/19">>: ok [ns_server:info,2014-08-19T16:48:16.471,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/189">>: ok [ns_server:info,2014-08-19T16:48:16.472,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/188">>: ok [ns_server:info,2014-08-19T16:48:16.473,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/187">>: ok [ns_server:info,2014-08-19T16:48:16.474,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/186">>: ok [ns_server:info,2014-08-19T16:48:16.475,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/185">>: ok [ns_server:info,2014-08-19T16:48:16.476,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/184">>: ok [ns_server:info,2014-08-19T16:48:16.477,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/183">>: ok [ns_server:info,2014-08-19T16:48:16.478,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/182">>: ok 
[ns_server:info,2014-08-19T16:48:16.480,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/181">>: ok [ns_server:info,2014-08-19T16:48:16.481,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/180">>: ok [ns_server:info,2014-08-19T16:48:16.482,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/18">>: ok [ns_server:info,2014-08-19T16:48:16.483,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/179">>: ok [ns_server:info,2014-08-19T16:48:16.484,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/178">>: ok [ns_server:info,2014-08-19T16:48:16.485,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/177">>: ok [ns_server:info,2014-08-19T16:48:16.486,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/176">>: ok [ns_server:info,2014-08-19T16:48:16.487,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/175">>: ok [ns_server:info,2014-08-19T16:48:16.489,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/174">>: ok [ns_server:info,2014-08-19T16:48:16.490,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/173">>: ok [ns_server:info,2014-08-19T16:48:16.491,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/172">>: ok [ns_server:info,2014-08-19T16:48:16.492,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/171">>: ok [ns_server:info,2014-08-19T16:48:16.493,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/170">>: ok [ns_server:info,2014-08-19T16:48:16.494,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/17">>: ok [ns_server:info,2014-08-19T16:48:16.495,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/169">>: ok [ns_server:info,2014-08-19T16:48:16.496,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/168">>: ok [ns_server:info,2014-08-19T16:48:16.497,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/167">>: ok [ns_server:info,2014-08-19T16:48:16.499,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/166">>: ok [ns_server:info,2014-08-19T16:48:16.500,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/165">>: ok [ns_server:info,2014-08-19T16:48:16.501,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/164">>: ok [ns_server:info,2014-08-19T16:48:16.502,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/163">>: ok [ns_server:info,2014-08-19T16:48:16.503,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/162">>: ok [ns_server:info,2014-08-19T16:48:16.504,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/161">>: ok [ns_server:info,2014-08-19T16:48:16.505,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/160">>: ok 
[ns_server:info,2014-08-19T16:48:16.506,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/16">>: ok [ns_server:info,2014-08-19T16:48:16.507,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/159">>: ok [ns_server:info,2014-08-19T16:48:16.508,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/158">>: ok [ns_server:info,2014-08-19T16:48:16.509,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/157">>: ok [ns_server:info,2014-08-19T16:48:16.510,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/156">>: ok [ns_server:info,2014-08-19T16:48:16.511,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/155">>: ok [ns_server:info,2014-08-19T16:48:16.512,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/154">>: ok [ns_server:info,2014-08-19T16:48:16.513,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/153">>: ok [ns_server:info,2014-08-19T16:48:16.514,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/152">>: ok [ns_server:info,2014-08-19T16:48:16.515,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/151">>: ok [ns_server:info,2014-08-19T16:48:16.516,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/150">>: ok [ns_server:info,2014-08-19T16:48:16.517,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/15">>: ok [ns_server:info,2014-08-19T16:48:16.517,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/149">>: ok [ns_server:info,2014-08-19T16:48:16.518,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/148">>: ok [ns_server:info,2014-08-19T16:48:16.519,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/147">>: ok [ns_server:info,2014-08-19T16:48:16.520,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/146">>: ok [ns_server:info,2014-08-19T16:48:16.521,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/145">>: ok [ns_server:info,2014-08-19T16:48:16.522,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/144">>: ok [ns_server:info,2014-08-19T16:48:16.523,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/143">>: ok [ns_server:info,2014-08-19T16:48:16.524,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/142">>: ok [ns_server:info,2014-08-19T16:48:16.525,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/141">>: ok [ns_server:info,2014-08-19T16:48:16.526,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/140">>: ok [ns_server:info,2014-08-19T16:48:16.527,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/14">>: ok [ns_server:info,2014-08-19T16:48:16.527,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/139">>: ok 
[ns_server:info,2014-08-19T16:48:16.528,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/138">>: ok [ns_server:info,2014-08-19T16:48:16.529,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/137">>: ok [ns_server:info,2014-08-19T16:48:16.530,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/136">>: ok [ns_server:info,2014-08-19T16:48:16.531,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/135">>: ok [ns_server:info,2014-08-19T16:48:16.532,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/134">>: ok [ns_server:info,2014-08-19T16:48:16.532,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/133">>: ok [ns_server:info,2014-08-19T16:48:16.533,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/132">>: ok [ns_server:info,2014-08-19T16:48:16.534,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/131">>: ok [ns_server:info,2014-08-19T16:48:16.535,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/130">>: ok [ns_server:info,2014-08-19T16:48:16.536,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/13">>: ok [ns_server:info,2014-08-19T16:48:16.537,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/129">>: ok [ns_server:info,2014-08-19T16:48:16.538,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/128">>: ok [ns_server:info,2014-08-19T16:48:16.538,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/127">>: ok [ns_server:info,2014-08-19T16:48:16.539,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/126">>: ok [ns_server:info,2014-08-19T16:48:16.540,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/125">>: ok [ns_server:info,2014-08-19T16:48:16.541,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/124">>: ok [ns_server:info,2014-08-19T16:48:16.542,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/123">>: ok [ns_server:info,2014-08-19T16:48:16.543,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/122">>: ok [ns_server:info,2014-08-19T16:48:16.543,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/121">>: ok [ns_server:info,2014-08-19T16:48:16.544,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/120">>: ok [ns_server:info,2014-08-19T16:48:16.545,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/12">>: ok [ns_server:info,2014-08-19T16:48:16.546,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/119">>: ok [ns_server:info,2014-08-19T16:48:16.546,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/118">>: ok [ns_server:info,2014-08-19T16:48:16.547,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/117">>: ok 
[ns_server:info,2014-08-19T16:48:16.548,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/116">>: ok [ns_server:info,2014-08-19T16:48:16.549,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/115">>: ok [ns_server:info,2014-08-19T16:48:16.549,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/114">>: ok [ns_server:info,2014-08-19T16:48:16.550,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/113">>: ok [ns_server:info,2014-08-19T16:48:16.551,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/112">>: ok [ns_server:info,2014-08-19T16:48:16.552,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/111">>: ok [ns_server:info,2014-08-19T16:48:16.552,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/110">>: ok [ns_server:info,2014-08-19T16:48:16.553,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/11">>: ok [ns_server:info,2014-08-19T16:48:16.554,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/109">>: ok [ns_server:info,2014-08-19T16:48:16.554,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/108">>: ok [ns_server:info,2014-08-19T16:48:16.555,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/107">>: ok [ns_server:info,2014-08-19T16:48:16.556,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/106">>: ok [ns_server:info,2014-08-19T16:48:16.556,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/105">>: ok [ns_server:info,2014-08-19T16:48:16.557,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/104">>: ok [ns_server:info,2014-08-19T16:48:16.558,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/103">>: ok [ns_server:info,2014-08-19T16:48:16.558,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/102">>: ok [ns_server:info,2014-08-19T16:48:16.559,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/101">>: ok [ns_server:info,2014-08-19T16:48:16.560,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/100">>: ok [ns_server:info,2014-08-19T16:48:16.560,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/10">>: ok [ns_server:info,2014-08-19T16:48:16.561,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/1">>: ok [ns_server:info,2014-08-19T16:48:16.562,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"tiles/0">>: ok [ns_server:info,2014-08-19T16:48:16.562,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_databases_and_files:436]Couch dbs are deleted. Proceeding with bucket directory [ns_server:debug,2014-08-19T16:48:16.562,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:do_delete_bucket_indexes:457]indexes directory doesn't exist already. fine. 
[ns_server:info,2014-08-19T16:48:16.565,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/master">>: ok [ns_server:info,2014-08-19T16:48:16.567,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/99">>: ok [ns_server:info,2014-08-19T16:48:16.569,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/98">>: ok [ns_server:info,2014-08-19T16:48:16.571,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/97">>: ok [ns_server:info,2014-08-19T16:48:16.574,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/96">>: ok [ns_server:info,2014-08-19T16:48:16.576,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/95">>: ok [ns_server:info,2014-08-19T16:48:16.578,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/94">>: ok [ns_server:info,2014-08-19T16:48:16.580,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/93">>: ok [ns_server:info,2014-08-19T16:48:16.584,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/92">>: ok [ns_server:info,2014-08-19T16:48:16.587,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/91">>: ok [ns_server:info,2014-08-19T16:48:16.590,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/90">>: ok [ns_server:info,2014-08-19T16:48:16.593,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/9">>: ok [ns_server:info,2014-08-19T16:48:16.595,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/89">>: ok [ns_server:info,2014-08-19T16:48:16.598,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/88">>: ok [ns_server:info,2014-08-19T16:48:16.601,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/87">>: ok [ns_server:info,2014-08-19T16:48:16.603,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/86">>: ok [ns_server:info,2014-08-19T16:48:16.606,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/852">>: ok [ns_server:info,2014-08-19T16:48:16.608,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/851">>: ok [ns_server:info,2014-08-19T16:48:16.610,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/850">>: ok [ns_server:info,2014-08-19T16:48:16.612,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/85">>: ok [ns_server:info,2014-08-19T16:48:16.615,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/849">>: ok [ns_server:info,2014-08-19T16:48:16.617,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/848">>: ok [ns_server:info,2014-08-19T16:48:16.619,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/847">>: ok [ns_server:info,2014-08-19T16:48:16.621,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/846">>: ok [ns_server:info,2014-08-19T16:48:16.623,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/845">>: ok [ns_server:info,2014-08-19T16:48:16.625,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/844">>: ok [ns_server:info,2014-08-19T16:48:16.627,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/843">>: ok [ns_server:info,2014-08-19T16:48:16.629,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/842">>: ok [ns_server:info,2014-08-19T16:48:16.631,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/841">>: ok [ns_server:info,2014-08-19T16:48:16.634,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/840">>: ok [ns_server:info,2014-08-19T16:48:16.637,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/84">>: ok [ns_server:info,2014-08-19T16:48:16.640,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/839">>: ok [ns_server:info,2014-08-19T16:48:16.642,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/838">>: ok [ns_server:info,2014-08-19T16:48:16.644,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/837">>: ok [ns_server:info,2014-08-19T16:48:16.646,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/836">>: ok [ns_server:info,2014-08-19T16:48:16.648,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/835">>: ok [ns_server:info,2014-08-19T16:48:16.650,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/834">>: ok [ns_server:info,2014-08-19T16:48:16.653,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/833">>: ok [ns_server:info,2014-08-19T16:48:16.656,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/832">>: ok [ns_server:info,2014-08-19T16:48:16.659,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/831">>: ok [ns_server:info,2014-08-19T16:48:16.662,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/830">>: ok [ns_server:info,2014-08-19T16:48:16.664,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/83">>: ok [ns_server:info,2014-08-19T16:48:16.666,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/829">>: ok [ns_server:info,2014-08-19T16:48:16.668,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/828">>: ok [ns_server:info,2014-08-19T16:48:16.671,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/827">>: ok [ns_server:info,2014-08-19T16:48:16.674,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/826">>: ok [ns_server:info,2014-08-19T16:48:16.677,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/825">>: ok 
[ns_server:info,2014-08-19T16:48:16.680,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/824">>: ok [ns_server:info,2014-08-19T16:48:16.682,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/823">>: ok [ns_server:info,2014-08-19T16:48:16.684,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/822">>: ok [ns_server:info,2014-08-19T16:48:16.686,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/821">>: ok [ns_server:info,2014-08-19T16:48:16.689,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/820">>: ok [ns_server:info,2014-08-19T16:48:16.692,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/82">>: ok [ns_server:info,2014-08-19T16:48:16.695,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/819">>: ok [ns_server:info,2014-08-19T16:48:16.697,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/818">>: ok [ns_server:info,2014-08-19T16:48:16.700,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/817">>: ok [ns_server:info,2014-08-19T16:48:16.702,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/816">>: ok [ns_server:info,2014-08-19T16:48:16.705,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/815">>: ok [ns_server:info,2014-08-19T16:48:16.708,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/814">>: ok [ns_server:info,2014-08-19T16:48:16.712,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/813">>: ok [ns_server:info,2014-08-19T16:48:16.715,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/812">>: ok [ns_server:info,2014-08-19T16:48:16.716,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/811">>: ok [ns_server:info,2014-08-19T16:48:16.719,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/810">>: ok [ns_server:info,2014-08-19T16:48:16.721,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/81">>: ok [ns_server:info,2014-08-19T16:48:16.724,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/809">>: ok [ns_server:info,2014-08-19T16:48:16.726,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/808">>: ok [ns_server:info,2014-08-19T16:48:16.728,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/807">>: ok [ns_server:info,2014-08-19T16:48:16.731,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/806">>: ok [ns_server:info,2014-08-19T16:48:16.733,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/805">>: ok [ns_server:info,2014-08-19T16:48:16.735,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/804">>: ok [ns_server:info,2014-08-19T16:48:16.737,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/803">>: ok [ns_server:info,2014-08-19T16:48:16.739,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/802">>: ok [ns_server:info,2014-08-19T16:48:16.742,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/801">>: ok [ns_server:info,2014-08-19T16:48:16.744,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/800">>: ok [ns_server:info,2014-08-19T16:48:16.745,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/80">>: ok [ns_server:info,2014-08-19T16:48:16.748,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/8">>: ok [ns_server:info,2014-08-19T16:48:16.752,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/799">>: ok [ns_server:info,2014-08-19T16:48:16.754,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/798">>: ok [ns_server:info,2014-08-19T16:48:16.758,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/797">>: ok [ns_server:info,2014-08-19T16:48:16.760,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/796">>: ok [ns_server:info,2014-08-19T16:48:16.763,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/795">>: ok [ns_server:info,2014-08-19T16:48:16.766,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/794">>: ok [ns_server:info,2014-08-19T16:48:16.769,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/793">>: ok [ns_server:info,2014-08-19T16:48:16.771,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/792">>: ok [ns_server:info,2014-08-19T16:48:16.775,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/791">>: ok [ns_server:info,2014-08-19T16:48:16.777,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/790">>: ok [ns_server:info,2014-08-19T16:48:16.780,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/79">>: ok [ns_server:info,2014-08-19T16:48:16.783,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/789">>: ok [ns_server:info,2014-08-19T16:48:16.786,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/788">>: ok [ns_server:info,2014-08-19T16:48:16.788,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/787">>: ok [ns_server:info,2014-08-19T16:48:16.791,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/786">>: ok [ns_server:info,2014-08-19T16:48:16.793,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/785">>: ok [ns_server:info,2014-08-19T16:48:16.796,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/784">>: ok [ns_server:info,2014-08-19T16:48:16.799,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/783">>: ok 
[ns_server:info,2014-08-19T16:48:16.801,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/782">>: ok [ns_server:info,2014-08-19T16:48:16.804,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/781">>: ok [ns_server:info,2014-08-19T16:48:16.807,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/780">>: ok [ns_server:info,2014-08-19T16:48:16.809,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/78">>: ok [ns_server:info,2014-08-19T16:48:16.812,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/779">>: ok [ns_server:info,2014-08-19T16:48:16.814,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/778">>: ok [ns_server:info,2014-08-19T16:48:16.817,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/777">>: ok [ns_server:info,2014-08-19T16:48:16.820,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/776">>: ok [ns_server:info,2014-08-19T16:48:16.823,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/775">>: ok [ns_server:info,2014-08-19T16:48:16.826,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/774">>: ok [ns_server:info,2014-08-19T16:48:16.828,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/773">>: ok [ns_server:info,2014-08-19T16:48:16.831,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/772">>: ok [ns_server:info,2014-08-19T16:48:16.834,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/771">>: ok [ns_server:info,2014-08-19T16:48:16.837,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/770">>: ok [ns_server:info,2014-08-19T16:48:16.839,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/77">>: ok [ns_server:info,2014-08-19T16:48:16.841,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/769">>: ok [ns_server:info,2014-08-19T16:48:16.843,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/768">>: ok [ns_server:info,2014-08-19T16:48:16.846,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/76">>: ok [ns_server:info,2014-08-19T16:48:16.848,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/75">>: ok [ns_server:info,2014-08-19T16:48:16.850,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/74">>: ok [ns_server:info,2014-08-19T16:48:16.852,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/73">>: ok [ns_server:info,2014-08-19T16:48:16.854,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/72">>: ok [ns_server:info,2014-08-19T16:48:16.856,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/71">>: ok [ns_server:info,2014-08-19T16:48:16.859,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/70">>: ok [ns_server:info,2014-08-19T16:48:16.861,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/7">>: ok [ns_server:info,2014-08-19T16:48:16.863,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/69">>: ok [ns_server:info,2014-08-19T16:48:16.865,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/68">>: ok [ns_server:info,2014-08-19T16:48:16.868,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/67">>: ok [ns_server:info,2014-08-19T16:48:16.870,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/66">>: ok [ns_server:info,2014-08-19T16:48:16.873,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/65">>: ok [ns_server:info,2014-08-19T16:48:16.875,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/64">>: ok [ns_server:info,2014-08-19T16:48:16.878,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/63">>: ok [ns_server:info,2014-08-19T16:48:16.880,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/62">>: ok [ns_server:info,2014-08-19T16:48:16.882,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/61">>: ok [ns_server:info,2014-08-19T16:48:16.884,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/60">>: ok [ns_server:info,2014-08-19T16:48:16.886,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/6">>: ok [ns_server:info,2014-08-19T16:48:16.887,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/596">>: ok [ns_server:info,2014-08-19T16:48:16.890,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/595">>: ok [ns_server:info,2014-08-19T16:48:16.892,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/594">>: ok [ns_server:info,2014-08-19T16:48:16.893,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/593">>: ok [ns_server:info,2014-08-19T16:48:16.895,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/592">>: ok [ns_server:info,2014-08-19T16:48:16.897,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/591">>: ok [ns_server:info,2014-08-19T16:48:16.899,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/590">>: ok [ns_server:info,2014-08-19T16:48:16.902,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/59">>: ok [ns_server:info,2014-08-19T16:48:16.904,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/589">>: ok [ns_server:info,2014-08-19T16:48:16.907,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/588">>: ok [ns_server:info,2014-08-19T16:48:16.909,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/587">>: ok [ns_server:info,2014-08-19T16:48:16.912,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting 
database <<"default/586">>: ok [ns_server:info,2014-08-19T16:48:16.914,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/585">>: ok [ns_server:info,2014-08-19T16:48:16.916,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/584">>: ok [ns_server:info,2014-08-19T16:48:16.918,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/583">>: ok [ns_server:info,2014-08-19T16:48:16.921,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/582">>: ok [ns_server:info,2014-08-19T16:48:16.922,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/581">>: ok [ns_server:info,2014-08-19T16:48:16.925,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/580">>: ok [ns_server:info,2014-08-19T16:48:16.927,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/58">>: ok [ns_server:info,2014-08-19T16:48:16.929,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/579">>: ok [ns_server:info,2014-08-19T16:48:16.932,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/578">>: ok [ns_server:info,2014-08-19T16:48:16.934,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/577">>: ok [ns_server:info,2014-08-19T16:48:16.937,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/576">>: ok [ns_server:info,2014-08-19T16:48:16.939,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/575">>: ok [ns_server:info,2014-08-19T16:48:16.942,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/574">>: ok [ns_server:info,2014-08-19T16:48:16.944,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/573">>: ok [ns_server:info,2014-08-19T16:48:16.947,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/572">>: ok [ns_server:info,2014-08-19T16:48:16.949,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/571">>: ok [ns_server:info,2014-08-19T16:48:16.951,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/570">>: ok [ns_server:info,2014-08-19T16:48:16.954,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/57">>: ok [ns_server:info,2014-08-19T16:48:16.956,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/569">>: ok [ns_server:info,2014-08-19T16:48:16.959,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/568">>: ok [ns_server:info,2014-08-19T16:48:16.961,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/567">>: ok [ns_server:info,2014-08-19T16:48:16.963,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/566">>: ok [ns_server:info,2014-08-19T16:48:16.966,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/565">>: ok 
[ns_server:info,2014-08-19T16:48:16.968,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/564">>: ok [ns_server:info,2014-08-19T16:48:16.970,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/563">>: ok [ns_server:info,2014-08-19T16:48:16.972,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/562">>: ok [ns_server:info,2014-08-19T16:48:16.975,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/561">>: ok [ns_server:info,2014-08-19T16:48:16.977,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/560">>: ok [ns_server:info,2014-08-19T16:48:16.980,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/56">>: ok [ns_server:info,2014-08-19T16:48:16.982,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/559">>: ok [ns_server:info,2014-08-19T16:48:16.984,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/558">>: ok [ns_server:info,2014-08-19T16:48:16.987,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/557">>: ok [ns_server:info,2014-08-19T16:48:16.989,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/556">>: ok [ns_server:info,2014-08-19T16:48:16.991,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/555">>: ok [ns_server:info,2014-08-19T16:48:16.993,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/554">>: ok [ns_server:info,2014-08-19T16:48:16.996,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/553">>: ok [ns_server:info,2014-08-19T16:48:16.998,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/552">>: ok [ns_server:info,2014-08-19T16:48:17.000,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/551">>: ok [ns_server:info,2014-08-19T16:48:17.002,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/550">>: ok [ns_server:info,2014-08-19T16:48:17.005,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/55">>: ok [ns_server:info,2014-08-19T16:48:17.007,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/549">>: ok [ns_server:info,2014-08-19T16:48:17.009,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/548">>: ok [ns_server:info,2014-08-19T16:48:17.011,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/547">>: ok [ns_server:info,2014-08-19T16:48:17.013,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/546">>: ok [ns_server:info,2014-08-19T16:48:17.016,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/545">>: ok [ns_server:info,2014-08-19T16:48:17.018,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/544">>: ok [ns_server:info,2014-08-19T16:48:17.020,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/543">>: ok [ns_server:info,2014-08-19T16:48:17.022,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/542">>: ok [ns_server:info,2014-08-19T16:48:17.024,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/541">>: ok [ns_server:info,2014-08-19T16:48:17.026,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/540">>: ok [ns_server:info,2014-08-19T16:48:17.028,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/54">>: ok [ns_server:info,2014-08-19T16:48:17.030,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/539">>: ok [ns_server:info,2014-08-19T16:48:17.032,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/538">>: ok [ns_server:info,2014-08-19T16:48:17.035,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/537">>: ok [ns_server:info,2014-08-19T16:48:17.037,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/536">>: ok [ns_server:info,2014-08-19T16:48:17.039,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/535">>: ok [ns_server:info,2014-08-19T16:48:17.041,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/534">>: ok [ns_server:info,2014-08-19T16:48:17.043,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/533">>: ok [ns_server:info,2014-08-19T16:48:17.045,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/532">>: ok [ns_server:info,2014-08-19T16:48:17.047,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/531">>: ok [ns_server:info,2014-08-19T16:48:17.049,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/530">>: ok [ns_server:info,2014-08-19T16:48:17.051,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/53">>: ok [ns_server:info,2014-08-19T16:48:17.054,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/529">>: ok [ns_server:info,2014-08-19T16:48:17.056,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/528">>: ok [ns_server:info,2014-08-19T16:48:17.058,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/527">>: ok [ns_server:info,2014-08-19T16:48:17.060,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/526">>: ok [ns_server:info,2014-08-19T16:48:17.062,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/525">>: ok [ns_server:info,2014-08-19T16:48:17.064,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/524">>: ok [ns_server:info,2014-08-19T16:48:17.066,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/523">>: ok [ns_server:info,2014-08-19T16:48:17.068,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/522">>: ok 
[ns_server:info,2014-08-19T16:48:17.070,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/521">>: ok [ns_server:info,2014-08-19T16:48:17.073,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/520">>: ok [ns_server:info,2014-08-19T16:48:17.075,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/52">>: ok [ns_server:info,2014-08-19T16:48:17.077,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/519">>: ok [ns_server:info,2014-08-19T16:48:17.078,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/518">>: ok [ns_server:info,2014-08-19T16:48:17.080,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/517">>: ok [ns_server:info,2014-08-19T16:48:17.082,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/516">>: ok [ns_server:info,2014-08-19T16:48:17.084,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/515">>: ok [ns_server:info,2014-08-19T16:48:17.086,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/514">>: ok [ns_server:info,2014-08-19T16:48:17.088,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/513">>: ok [ns_server:info,2014-08-19T16:48:17.090,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/512">>: ok [ns_server:info,2014-08-19T16:48:17.092,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/51">>: ok [ns_server:info,2014-08-19T16:48:17.094,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/50">>: ok [ns_server:info,2014-08-19T16:48:17.096,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/5">>: ok [ns_server:info,2014-08-19T16:48:17.098,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/49">>: ok [ns_server:info,2014-08-19T16:48:17.100,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/48">>: ok [ns_server:info,2014-08-19T16:48:17.102,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/47">>: ok [ns_server:info,2014-08-19T16:48:17.104,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/46">>: ok [ns_server:info,2014-08-19T16:48:17.106,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/45">>: ok [ns_server:info,2014-08-19T16:48:17.108,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/44">>: ok [ns_server:info,2014-08-19T16:48:17.110,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/43">>: ok [ns_server:info,2014-08-19T16:48:17.112,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/42">>: ok [ns_server:info,2014-08-19T16:48:17.114,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/41">>: ok [ns_server:info,2014-08-19T16:48:17.116,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/40">>: ok [ns_server:info,2014-08-19T16:48:17.118,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/4">>: ok [ns_server:info,2014-08-19T16:48:17.120,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/39">>: ok [ns_server:info,2014-08-19T16:48:17.122,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/38">>: ok [ns_server:info,2014-08-19T16:48:17.124,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/37">>: ok [ns_server:info,2014-08-19T16:48:17.126,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/36">>: ok [ns_server:info,2014-08-19T16:48:17.128,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/35">>: ok [ns_server:info,2014-08-19T16:48:17.130,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/341">>: ok [ns_server:info,2014-08-19T16:48:17.132,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/340">>: ok [ns_server:info,2014-08-19T16:48:17.134,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/34">>: ok [ns_server:info,2014-08-19T16:48:17.136,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/339">>: ok [ns_server:info,2014-08-19T16:48:17.138,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/338">>: ok [ns_server:info,2014-08-19T16:48:17.140,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/337">>: ok [ns_server:info,2014-08-19T16:48:17.142,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/336">>: ok [ns_server:info,2014-08-19T16:48:17.144,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/335">>: ok [ns_server:info,2014-08-19T16:48:17.145,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/334">>: ok [ns_server:info,2014-08-19T16:48:17.147,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/333">>: ok [ns_server:info,2014-08-19T16:48:17.148,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/332">>: ok [ns_server:info,2014-08-19T16:48:17.149,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/331">>: ok [ns_server:info,2014-08-19T16:48:17.151,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/330">>: ok [ns_server:info,2014-08-19T16:48:17.153,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/33">>: ok [ns_server:info,2014-08-19T16:48:17.155,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/329">>: ok [ns_server:info,2014-08-19T16:48:17.157,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/328">>: ok [ns_server:info,2014-08-19T16:48:17.158,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/327">>: ok 
[ns_server:info,2014-08-19T16:48:17.160,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/326">>: ok [ns_server:info,2014-08-19T16:48:17.161,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/325">>: ok [ns_server:info,2014-08-19T16:48:17.163,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/324">>: ok [ns_server:info,2014-08-19T16:48:17.164,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/323">>: ok [ns_server:info,2014-08-19T16:48:17.166,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/322">>: ok [ns_server:info,2014-08-19T16:48:17.168,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/321">>: ok [ns_server:info,2014-08-19T16:48:17.170,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/320">>: ok [ns_server:info,2014-08-19T16:48:17.171,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/32">>: ok [ns_server:info,2014-08-19T16:48:17.173,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/319">>: ok [ns_server:info,2014-08-19T16:48:17.175,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/318">>: ok [ns_server:info,2014-08-19T16:48:17.176,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/317">>: ok [ns_server:info,2014-08-19T16:48:17.177,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/316">>: ok [ns_server:info,2014-08-19T16:48:17.179,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/315">>: ok [ns_server:info,2014-08-19T16:48:17.180,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/314">>: ok [ns_server:info,2014-08-19T16:48:17.182,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/313">>: ok [ns_server:info,2014-08-19T16:48:17.184,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/312">>: ok [ns_server:info,2014-08-19T16:48:17.186,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/311">>: ok [ns_server:info,2014-08-19T16:48:17.188,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/310">>: ok [ns_server:info,2014-08-19T16:48:17.190,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/31">>: ok [ns_server:info,2014-08-19T16:48:17.192,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/309">>: ok [ns_server:info,2014-08-19T16:48:17.193,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/308">>: ok [ns_server:info,2014-08-19T16:48:17.195,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/307">>: ok [ns_server:info,2014-08-19T16:48:17.197,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/306">>: ok [ns_server:info,2014-08-19T16:48:17.198,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/305">>: ok [ns_server:info,2014-08-19T16:48:17.200,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/304">>: ok [ns_server:info,2014-08-19T16:48:17.201,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/303">>: ok [ns_server:info,2014-08-19T16:48:17.203,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/302">>: ok [ns_server:info,2014-08-19T16:48:17.204,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/301">>: ok [ns_server:info,2014-08-19T16:48:17.206,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/300">>: ok [ns_server:info,2014-08-19T16:48:17.208,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/30">>: ok [ns_server:info,2014-08-19T16:48:17.210,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/3">>: ok [ns_server:info,2014-08-19T16:48:17.212,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/299">>: ok [ns_server:info,2014-08-19T16:48:17.214,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/298">>: ok [ns_server:info,2014-08-19T16:48:17.215,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/297">>: ok [ns_server:info,2014-08-19T16:48:17.217,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/296">>: ok [ns_server:info,2014-08-19T16:48:17.219,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/295">>: ok [ns_server:info,2014-08-19T16:48:17.220,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/294">>: ok [ns_server:info,2014-08-19T16:48:17.222,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/293">>: ok [ns_server:info,2014-08-19T16:48:17.223,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/292">>: ok [ns_server:info,2014-08-19T16:48:17.225,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/291">>: ok [ns_server:info,2014-08-19T16:48:17.226,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/290">>: ok [ns_server:info,2014-08-19T16:48:17.228,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/29">>: ok [ns_server:info,2014-08-19T16:48:17.229,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/289">>: ok [ns_server:info,2014-08-19T16:48:17.231,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/288">>: ok [ns_server:info,2014-08-19T16:48:17.233,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/287">>: ok [ns_server:info,2014-08-19T16:48:17.234,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/286">>: ok [ns_server:info,2014-08-19T16:48:17.236,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/285">>: ok 
[ns_server:info,2014-08-19T16:48:17.237,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/284">>: ok [ns_server:info,2014-08-19T16:48:17.239,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/283">>: ok [ns_server:info,2014-08-19T16:48:17.240,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/282">>: ok [ns_server:info,2014-08-19T16:48:17.241,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/281">>: ok [ns_server:info,2014-08-19T16:48:17.243,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/280">>: ok [ns_server:info,2014-08-19T16:48:17.244,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/28">>: ok [ns_server:info,2014-08-19T16:48:17.245,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/279">>: ok [ns_server:info,2014-08-19T16:48:17.246,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/278">>: ok [ns_server:info,2014-08-19T16:48:17.248,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/277">>: ok [ns_server:info,2014-08-19T16:48:17.250,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/276">>: ok [ns_server:info,2014-08-19T16:48:17.252,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/275">>: ok [ns_server:info,2014-08-19T16:48:17.253,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/274">>: ok [ns_server:info,2014-08-19T16:48:17.254,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/273">>: ok [ns_server:info,2014-08-19T16:48:17.256,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/272">>: ok [ns_server:info,2014-08-19T16:48:17.257,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/271">>: ok [ns_server:info,2014-08-19T16:48:17.258,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/270">>: ok [ns_server:info,2014-08-19T16:48:17.259,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/27">>: ok [ns_server:info,2014-08-19T16:48:17.261,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/269">>: ok [ns_server:info,2014-08-19T16:48:17.262,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/268">>: ok [ns_server:info,2014-08-19T16:48:17.263,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/267">>: ok [ns_server:info,2014-08-19T16:48:17.265,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/266">>: ok [ns_server:info,2014-08-19T16:48:17.266,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/265">>: ok [ns_server:info,2014-08-19T16:48:17.268,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/264">>: ok [ns_server:info,2014-08-19T16:48:17.269,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/263">>: ok [ns_server:info,2014-08-19T16:48:17.271,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/262">>: ok [ns_server:info,2014-08-19T16:48:17.272,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/261">>: ok [ns_server:info,2014-08-19T16:48:17.274,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/260">>: ok [ns_server:info,2014-08-19T16:48:17.275,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/26">>: ok [ns_server:info,2014-08-19T16:48:17.277,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/259">>: ok [ns_server:info,2014-08-19T16:48:17.279,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/258">>: ok [ns_server:info,2014-08-19T16:48:17.280,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/257">>: ok [ns_server:info,2014-08-19T16:48:17.281,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/256">>: ok [ns_server:info,2014-08-19T16:48:17.283,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/255">>: ok [ns_server:info,2014-08-19T16:48:17.284,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/254">>: ok [ns_server:info,2014-08-19T16:48:17.285,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/253">>: ok [ns_server:info,2014-08-19T16:48:17.286,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/252">>: ok [ns_server:info,2014-08-19T16:48:17.287,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/251">>: ok [ns_server:info,2014-08-19T16:48:17.288,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/250">>: ok [ns_server:info,2014-08-19T16:48:17.290,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/25">>: ok [ns_server:info,2014-08-19T16:48:17.291,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/249">>: ok [ns_server:info,2014-08-19T16:48:17.292,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/248">>: ok [ns_server:info,2014-08-19T16:48:17.293,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/247">>: ok [ns_server:info,2014-08-19T16:48:17.294,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/246">>: ok [ns_server:info,2014-08-19T16:48:17.296,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/245">>: ok [ns_server:info,2014-08-19T16:48:17.297,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/244">>: ok [ns_server:info,2014-08-19T16:48:17.298,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/243">>: ok [ns_server:info,2014-08-19T16:48:17.300,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/242">>: ok 
[ns_server:info,2014-08-19T16:48:17.301,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/241">>: ok [ns_server:info,2014-08-19T16:48:17.302,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/240">>: ok [ns_server:info,2014-08-19T16:48:17.303,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/24">>: ok [ns_server:info,2014-08-19T16:48:17.305,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/239">>: ok [ns_server:info,2014-08-19T16:48:17.307,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/238">>: ok [ns_server:info,2014-08-19T16:48:17.308,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/237">>: ok [ns_server:info,2014-08-19T16:48:17.309,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/236">>: ok [ns_server:info,2014-08-19T16:48:17.310,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/235">>: ok [ns_server:info,2014-08-19T16:48:17.312,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/234">>: ok [ns_server:info,2014-08-19T16:48:17.313,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/233">>: ok [ns_server:info,2014-08-19T16:48:17.314,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/232">>: ok [ns_server:info,2014-08-19T16:48:17.316,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/231">>: ok [ns_server:info,2014-08-19T16:48:17.317,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/230">>: ok [ns_server:info,2014-08-19T16:48:17.318,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/23">>: ok [ns_server:info,2014-08-19T16:48:17.319,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/229">>: ok [ns_server:info,2014-08-19T16:48:17.320,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/228">>: ok [ns_server:info,2014-08-19T16:48:17.322,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/227">>: ok [ns_server:info,2014-08-19T16:48:17.323,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/226">>: ok [ns_server:info,2014-08-19T16:48:17.324,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/225">>: ok [ns_server:info,2014-08-19T16:48:17.326,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/224">>: ok [ns_server:info,2014-08-19T16:48:17.327,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/223">>: ok [ns_server:info,2014-08-19T16:48:17.328,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/222">>: ok [ns_server:info,2014-08-19T16:48:17.330,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/221">>: ok [ns_server:info,2014-08-19T16:48:17.331,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/220">>: ok [ns_server:info,2014-08-19T16:48:17.332,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/22">>: ok [ns_server:info,2014-08-19T16:48:17.334,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/219">>: ok [ns_server:info,2014-08-19T16:48:17.335,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/218">>: ok [ns_server:info,2014-08-19T16:48:17.336,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/217">>: ok [ns_server:info,2014-08-19T16:48:17.337,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/216">>: ok [ns_server:info,2014-08-19T16:48:17.338,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/215">>: ok [ns_server:info,2014-08-19T16:48:17.340,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/214">>: ok [ns_server:info,2014-08-19T16:48:17.341,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/213">>: ok [ns_server:info,2014-08-19T16:48:17.342,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/212">>: ok [ns_server:info,2014-08-19T16:48:17.344,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/211">>: ok [ns_server:info,2014-08-19T16:48:17.345,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/210">>: ok [ns_server:info,2014-08-19T16:48:17.346,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/21">>: ok [ns_server:info,2014-08-19T16:48:17.347,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/209">>: ok [ns_server:info,2014-08-19T16:48:17.348,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/208">>: ok [ns_server:info,2014-08-19T16:48:17.350,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/207">>: ok [ns_server:info,2014-08-19T16:48:17.351,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/206">>: ok [ns_server:info,2014-08-19T16:48:17.352,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/205">>: ok [ns_server:info,2014-08-19T16:48:17.353,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/204">>: ok [ns_server:info,2014-08-19T16:48:17.355,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/203">>: ok [ns_server:info,2014-08-19T16:48:17.356,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/202">>: ok [ns_server:info,2014-08-19T16:48:17.357,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/201">>: ok [ns_server:info,2014-08-19T16:48:17.359,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/200">>: ok [ns_server:info,2014-08-19T16:48:17.360,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/20">>: ok 
[ns_server:info,2014-08-19T16:48:17.361,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/2">>: ok [ns_server:info,2014-08-19T16:48:17.362,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/199">>: ok [ns_server:info,2014-08-19T16:48:17.363,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/198">>: ok [ns_server:info,2014-08-19T16:48:17.365,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/197">>: ok [ns_server:info,2014-08-19T16:48:17.366,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/196">>: ok [ns_server:info,2014-08-19T16:48:17.367,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/195">>: ok [ns_server:info,2014-08-19T16:48:17.368,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/194">>: ok [ns_server:info,2014-08-19T16:48:17.369,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/193">>: ok [ns_server:info,2014-08-19T16:48:17.370,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/192">>: ok [ns_server:info,2014-08-19T16:48:17.371,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/191">>: ok [ns_server:info,2014-08-19T16:48:17.372,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/190">>: ok [ns_server:info,2014-08-19T16:48:17.373,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/19">>: ok [ns_server:info,2014-08-19T16:48:17.375,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/189">>: ok [ns_server:info,2014-08-19T16:48:17.376,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/188">>: ok [ns_server:info,2014-08-19T16:48:17.377,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/187">>: ok [ns_server:info,2014-08-19T16:48:17.378,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/186">>: ok [ns_server:info,2014-08-19T16:48:17.379,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/185">>: ok [ns_server:info,2014-08-19T16:48:17.380,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/184">>: ok [ns_server:info,2014-08-19T16:48:17.381,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/183">>: ok [ns_server:info,2014-08-19T16:48:17.382,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/182">>: ok [ns_server:info,2014-08-19T16:48:17.383,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/181">>: ok [ns_server:info,2014-08-19T16:48:17.384,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/180">>: ok [ns_server:info,2014-08-19T16:48:17.385,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/18">>: ok [ns_server:info,2014-08-19T16:48:17.386,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/179">>: ok [ns_server:info,2014-08-19T16:48:17.387,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/178">>: ok [ns_server:info,2014-08-19T16:48:17.387,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/177">>: ok [ns_server:info,2014-08-19T16:48:17.388,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/176">>: ok [ns_server:info,2014-08-19T16:48:17.389,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/175">>: ok [ns_server:info,2014-08-19T16:48:17.390,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/174">>: ok [ns_server:info,2014-08-19T16:48:17.391,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/173">>: ok [ns_server:info,2014-08-19T16:48:17.392,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/172">>: ok [ns_server:info,2014-08-19T16:48:17.393,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/171">>: ok [ns_server:info,2014-08-19T16:48:17.394,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/170">>: ok [ns_server:info,2014-08-19T16:48:17.395,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/17">>: ok [ns_server:info,2014-08-19T16:48:17.396,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/169">>: ok [ns_server:info,2014-08-19T16:48:17.397,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/168">>: ok [ns_server:info,2014-08-19T16:48:17.398,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/167">>: ok [ns_server:info,2014-08-19T16:48:17.398,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/166">>: ok [ns_server:info,2014-08-19T16:48:17.400,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/165">>: ok [ns_server:info,2014-08-19T16:48:17.400,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/164">>: ok [ns_server:info,2014-08-19T16:48:17.401,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/163">>: ok [ns_server:info,2014-08-19T16:48:17.402,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/162">>: ok [ns_server:info,2014-08-19T16:48:17.403,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/161">>: ok [ns_server:info,2014-08-19T16:48:17.404,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/160">>: ok [ns_server:info,2014-08-19T16:48:17.405,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/16">>: ok [ns_server:info,2014-08-19T16:48:17.406,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/159">>: ok [ns_server:info,2014-08-19T16:48:17.406,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/158">>: ok 
[ns_server:info,2014-08-19T16:48:17.407,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/157">>: ok [ns_server:info,2014-08-19T16:48:17.408,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/156">>: ok [ns_server:info,2014-08-19T16:48:17.409,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/155">>: ok [ns_server:info,2014-08-19T16:48:17.410,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/154">>: ok [ns_server:info,2014-08-19T16:48:17.411,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/153">>: ok [ns_server:info,2014-08-19T16:48:17.412,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/152">>: ok [ns_server:info,2014-08-19T16:48:17.413,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/151">>: ok [ns_server:info,2014-08-19T16:48:17.414,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/150">>: ok [ns_server:info,2014-08-19T16:48:17.415,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/15">>: ok [ns_server:info,2014-08-19T16:48:17.416,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/149">>: ok [ns_server:info,2014-08-19T16:48:17.416,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/148">>: ok [ns_server:info,2014-08-19T16:48:17.417,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/147">>: ok [ns_server:info,2014-08-19T16:48:17.418,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/146">>: ok [ns_server:info,2014-08-19T16:48:17.419,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/145">>: ok [ns_server:info,2014-08-19T16:48:17.420,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/144">>: ok [ns_server:info,2014-08-19T16:48:17.421,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/143">>: ok [ns_server:info,2014-08-19T16:48:17.422,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/142">>: ok [ns_server:info,2014-08-19T16:48:17.423,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/141">>: ok [ns_server:info,2014-08-19T16:48:17.424,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/140">>: ok [ns_server:info,2014-08-19T16:48:17.425,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/14">>: ok [ns_server:info,2014-08-19T16:48:17.426,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/139">>: ok [ns_server:info,2014-08-19T16:48:17.427,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/138">>: ok [ns_server:info,2014-08-19T16:48:17.428,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/137">>: ok [ns_server:info,2014-08-19T16:48:17.429,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database 
<<"default/136">>: ok [ns_server:info,2014-08-19T16:48:17.430,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/135">>: ok [ns_server:info,2014-08-19T16:48:17.430,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/134">>: ok [ns_server:info,2014-08-19T16:48:17.431,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/133">>: ok [ns_server:info,2014-08-19T16:48:17.432,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/132">>: ok [ns_server:info,2014-08-19T16:48:17.433,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/131">>: ok [ns_server:info,2014-08-19T16:48:17.434,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/130">>: ok [ns_server:info,2014-08-19T16:48:17.435,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/13">>: ok [ns_server:info,2014-08-19T16:48:17.436,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/129">>: ok [ns_server:info,2014-08-19T16:48:17.437,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/128">>: ok [ns_server:info,2014-08-19T16:48:17.438,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/127">>: ok [ns_server:info,2014-08-19T16:48:17.438,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/126">>: ok [ns_server:info,2014-08-19T16:48:17.439,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/125">>: ok [ns_server:info,2014-08-19T16:48:17.440,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/124">>: ok [ns_server:info,2014-08-19T16:48:17.441,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/123">>: ok [ns_server:info,2014-08-19T16:48:17.441,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/122">>: ok [ns_server:info,2014-08-19T16:48:17.442,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/121">>: ok [ns_server:info,2014-08-19T16:48:17.443,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/120">>: ok [ns_server:info,2014-08-19T16:48:17.444,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/12">>: ok [ns_server:info,2014-08-19T16:48:17.445,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/119">>: ok [ns_server:info,2014-08-19T16:48:17.445,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/118">>: ok [ns_server:info,2014-08-19T16:48:17.446,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/117">>: ok [ns_server:info,2014-08-19T16:48:17.447,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/116">>: ok [ns_server:info,2014-08-19T16:48:17.448,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/115">>: ok 
[ns_server:info,2014-08-19T16:48:17.448,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/114">>: ok [ns_server:info,2014-08-19T16:48:17.449,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/113">>: ok [ns_server:info,2014-08-19T16:48:17.450,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/112">>: ok [ns_server:info,2014-08-19T16:48:17.450,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/111">>: ok [ns_server:info,2014-08-19T16:48:17.451,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/110">>: ok [ns_server:info,2014-08-19T16:48:17.452,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/11">>: ok [ns_server:info,2014-08-19T16:48:17.452,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/109">>: ok [ns_server:info,2014-08-19T16:48:17.453,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/108">>: ok [ns_server:info,2014-08-19T16:48:17.454,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/107">>: ok [ns_server:info,2014-08-19T16:48:17.455,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/106">>: ok [ns_server:info,2014-08-19T16:48:17.455,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/105">>: ok [ns_server:info,2014-08-19T16:48:17.456,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/104">>: ok [ns_server:info,2014-08-19T16:48:17.457,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/103">>: ok [ns_server:info,2014-08-19T16:48:17.457,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/102">>: ok [ns_server:info,2014-08-19T16:48:17.458,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/101">>: ok [ns_server:info,2014-08-19T16:48:17.459,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/100">>: ok [ns_server:info,2014-08-19T16:48:17.459,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/10">>: ok [ns_server:info,2014-08-19T16:48:17.460,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/1">>: ok [ns_server:info,2014-08-19T16:48:17.461,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_couch_database:389]Deleting database <<"default/0">>: ok [ns_server:info,2014-08-19T16:48:17.461,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:delete_databases_and_files:436]Couch dbs are deleted. Proceeding with bucket directory [ns_server:debug,2014-08-19T16:48:17.461,ns_1@127.0.0.1:<0.18028.0>:ns_storage_conf:do_delete_bucket_indexes:457]indexes directory doesn't exist already. fine. [ns_server:debug,2014-08-19T16:48:17.462,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: rest -> [{port,8091}] [ns_server:debug,2014-08-19T16:48:17.462,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([read_only_user_creds,rest,rest_creds]..) 
[ns_server:info,2014-08-19T16:48:17.462,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:handle_info:63]config change: rest_creds -> ******** [ns_server:debug,2014-08-19T16:48:17.462,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: read_only_user_creds -> null [user:info,2014-08-19T16:48:17.516,ns_1@127.0.0.1:<0.17911.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. Messages: EOL on stdin. Exiting [ns_server:debug,2014-08-19T16:48:17.553,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [menelaus:info,2014-08-19T16:48:17.553,ns_1@127.0.0.1:<0.18022.0>:menelaus_web_buckets:do_bucket_create:495]Created bucket "default" of type: membase [{num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}] [ns_server:debug,2014-08-19T16:48:17.553,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[[{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,[]}]]}] [ns_server:debug,2014-08-19T16:48:17.554,ns_1@127.0.0.1:ns_config_isasl_sync<0.17914.0>:ns_config_isasl_sync:writeSASLConf:143]Writing isasl passwd file: "/opt/couchbase/var/lib/couchbase/isasl.pw" [ns_server:debug,2014-08-19T16:48:17.555,ns_1@127.0.0.1:<0.19172.0>:ns_janitor:cleanup_with_membase_bucket_check_servers:49]janitor decided to update servers list [ns_server:debug,2014-08-19T16:48:17.555,ns_1@127.0.0.1:ns_bucket_worker<0.18053.0>:ns_bucket_sup:update_childs:84]Starting new child: {{per_bucket_sup,"default"}, {single_bucket_sup,start_link,["default"]}, permanent,infinity,supervisor, [single_bucket_sup]} [ns_server:debug,2014-08-19T16:48:17.556,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:48:17.556,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@127.0.0.1']}]}]}] [ns_server:debug,2014-08-19T16:48:17.559,ns_1@127.0.0.1:<0.19178.0>:janitor_agent:new_style_query_vbucket_states_loop:120]Exception from query_vbucket_states of "default":'ns_1@127.0.0.1' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@127.0.0.1'}, query_vbucket_states,infinity]}}} [ns_server:debug,2014-08-19T16:48:17.559,ns_1@127.0.0.1:<0.19178.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:125]Waiting for "default" on 'ns_1@127.0.0.1' [error_logger:info,2014-08-19T16:48:17.587,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_bucket_sup} started: [{pid,<0.19183.0>}, {name,{per_bucket_sup,"default"}}, {mfargs,{single_bucket_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [ns_server:debug,2014-08-19T16:48:17.635,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:init:228]Usable vbuckets: [] [ns_server:debug,2014-08-19T16:48:17.635,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:17.635,ns_1@127.0.0.1:ns_memcached-default<0.19197.0>:ns_memcached:init:144]Starting ns_memcached [error_logger:info,2014-08-19T16:48:17.635,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19185.0>}, {name,{capi_set_view_manager,"default"}}, {mfargs,{capi_set_view_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:17.635,ns_1@127.0.0.1:<0.19198.0>:ns_memcached:run_connect_phase:167]Started 'connecting' phase of ns_memcached-default. 
Parent is <0.19197.0> [error_logger:info,2014-08-19T16:48:17.635,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19197.0>}, {name,{ns_memcached,"default"}}, {mfargs,{ns_memcached,start_link,["default"]}}, {restart_type,permanent}, {shutdown,86400000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:17.637,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19199.0>}, {name,{tap_replication_manager,"default"}}, {mfargs, {tap_replication_manager,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:17.638,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19200.0>}, {name,{ns_vbm_new_sup,"default"}}, {mfargs,{ns_vbm_new_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [error_logger:info,2014-08-19T16:48:17.640,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19201.0>}, {name,{ns_vbm_sup,"default"}}, {mfargs,{ns_vbm_sup,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:info,2014-08-19T16:48:17.640,ns_1@127.0.0.1:janitor_agent-default<0.19202.0>:janitor_agent:read_flush_counter:936]Loading flushseq failed: {error,enoent}. Assuming it's equal to global config. 
[ns_server:info,2014-08-19T16:48:17.640,ns_1@127.0.0.1:janitor_agent-default<0.19202.0>:janitor_agent:read_flush_counter_from_config:943]Initialized flushseq 0 from bucket config [error_logger:info,2014-08-19T16:48:17.640,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19202.0>}, {name,{janitor_agent,"default"}}, {mfargs,{janitor_agent,start_link,["default"]}}, {restart_type,permanent}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:17.642,ns_1@127.0.0.1:<0.18037.0>:mc_tcp_listener:accept_loop:31]Got new connection [error_logger:info,2014-08-19T16:48:17.642,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19203.0>}, {name,{couch_stats_reader,"default"}}, {mfargs,{couch_stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:17.642,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19204.0>}, {name,{stats_collector,"default"}}, {mfargs,{stats_collector,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:17.643,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19206.0>}, {name,{stats_archiver,"default"}}, {mfargs,{stats_archiver,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:17.643,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19208.0>}, {name,{stats_reader,"default"}}, {mfargs,{stats_reader,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:17.644,ns_1@127.0.0.1:<0.18037.0>:mc_tcp_listener:accept_loop:33]Passed connection to mc_conn_sup: <0.19210.0> [error_logger:info,2014-08-19T16:48:17.644,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19209.0>}, {name,{failover_safeness_level,"default"}}, {mfargs, {failover_safeness_level,start_link,["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:17.645,ns_1@127.0.0.1:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,'single_bucket_sup-default'} started: [{pid,<0.19211.0>}, {name,{terse_bucket_info_uploader,"default"}}, {mfargs, {terse_bucket_info_uploader,start_link, ["default"]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:info,2014-08-19T16:48:17.647,ns_1@127.0.0.1:ns_memcached-default<0.19197.0>:ns_memcached:ensure_bucket:1178]Created bucket "default" with config string 
"ht_size=3079;ht_locks=5;tap_noop_interval=20;max_txn_size=10000;max_size=13369344000;tap_keepalive=300;dbname=/var/lib/pgsql/default;allow_data_loss_during_shutdown=true;backend=couchdb;couch_bucket=default;couch_port=11213;max_vbuckets=1024;alog_path=/var/lib/pgsql/default/access.log;data_traffic_enabled=false;max_num_workers=3;uuid=d95ae85dc319bab78fd23c50f6adae2e;vb0=false;waitforwarmup=false;failpartialwarmup=false;" [ns_server:info,2014-08-19T16:48:17.647,ns_1@127.0.0.1:ns_memcached-default<0.19197.0>:ns_memcached:handle_cast:609]Main ns_memcached connection established: {ok,#Port<0.13090>} [ns_server:debug,2014-08-19T16:48:17.648,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [user:info,2014-08-19T16:48:17.648,ns_1@127.0.0.1:ns_memcached-default<0.19197.0>:ns_memcached:handle_cast:632]Bucket "default" loaded on node 'ns_1@127.0.0.1' in 0 seconds. [ns_server:debug,2014-08-19T16:48:17.838,ns_1@127.0.0.1:ns_heart_slow_status_updater<0.17944.0>:ns_heart:current_status_slow:261]Ignoring failure to get stats for bucket: "default": {error,no_samples} [ns_server:info,2014-08-19T16:48:18.562,ns_1@127.0.0.1:<0.19172.0>:ns_janitor:cleanup_with_membase_bucket_check_map:76]janitor decided to generate initial vbucket map [ns_server:debug,2014-08-19T16:48:18.575,ns_1@127.0.0.1:<0.19172.0>:mb_map:generate_map_chain:403]Natural map score: {1024,0,0} [ns_server:debug,2014-08-19T16:48:18.587,ns_1@127.0.0.1:<0.19172.0>:mb_map:generate_map_chain:410]Rnd maps scores: {1024,0,0}, {1024,0,0} [ns_server:debug,2014-08-19T16:48:18.587,ns_1@127.0.0.1:<0.19172.0>:mb_map:generate_map_chain:427]Considering 1 maps: [{1024,0,0}] [ns_server:debug,2014-08-19T16:48:18.587,ns_1@127.0.0.1:<0.19172.0>:mb_map:generate_map_chain:439]Best map score: {1024,0,0} (true,true,true) [ns_server:debug,2014-08-19T16:48:18.588,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:18.588,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([vbucket_map_history]..) 
[ns_server:debug,2014-08-19T16:48:18.588,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:18.590,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:18.590,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [{[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] 
[ns_server:debug,2014-08-19T16:48:18.591,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:18.591,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:18.591,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:18.591,ns_1@127.0.0.1:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:48:18.601,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{0,[],['ns_1@127.0.0.1',undefined]}, {1,[],['ns_1@127.0.0.1',undefined]}, {2,[],['ns_1@127.0.0.1',undefined]}, {3,[],['ns_1@127.0.0.1',undefined]}, {4,[],['ns_1@127.0.0.1',undefined]}, {5,[],['ns_1@127.0.0.1',undefined]}, {6,[],['ns_1@127.0.0.1',undefined]}, {7,[],['ns_1@127.0.0.1',undefined]}, {8,[],['ns_1@127.0.0.1',undefined]}, {9,[],['ns_1@127.0.0.1',undefined]}, {10,[],['ns_1@127.0.0.1',undefined]}, {11,[],['ns_1@127.0.0.1',undefined]}, {12,[],['ns_1@127.0.0.1',undefined]}, {13,[],['ns_1@127.0.0.1',undefined]}, {14,[],['ns_1@127.0.0.1',undefined]}, {15,[],['ns_1@127.0.0.1',undefined]}, {16,[],['ns_1@127.0.0.1',undefined]}, {17,[],['ns_1@127.0.0.1',undefined]}, {18,[],['ns_1@127.0.0.1',undefined]}, {19,[],['ns_1@127.0.0.1',undefined]}, {20,[],['ns_1@127.0.0.1',undefined]}, {21,[],['ns_1@127.0.0.1',undefined]}, {22,[],['ns_1@127.0.0.1',undefined]}, {23,[],['ns_1@127.0.0.1',undefined]}, {24,[],['ns_1@127.0.0.1',undefined]}, {25,[],['ns_1@127.0.0.1',undefined]}, {26,[],['ns_1@127.0.0.1',undefined]}, {27,[],['ns_1@127.0.0.1',undefined]}, {28,[],['ns_1@127.0.0.1',undefined]}, {29,[],['ns_1@127.0.0.1',undefined]}, {30,[],['ns_1@127.0.0.1',undefined]}, {31,[],['ns_1@127.0.0.1',undefined]}, {32,[],['ns_1@127.0.0.1',undefined]}, {33,[],['ns_1@127.0.0.1',undefined]}, {34,[],['ns_1@127.0.0.1',undefined]}, {35,[],['ns_1@127.0.0.1',undefined]}, {36,[],['ns_1@127.0.0.1',undefined]}, {37,[],['ns_1@127.0.0.1',undefined]}, {38,[],['ns_1@127.0.0.1',undefined]}, {39,[],['ns_1@127.0.0.1',undefined]}, {40,[],['ns_1@127.0.0.1',undefined]}, {41,[],['ns_1@127.0.0.1',undefined]}, {42,[],['ns_1@127.0.0.1',undefined]}, {43,[],['ns_1@127.0.0.1',undefined]}, {44,[],['ns_1@127.0.0.1',undefined]}, {45,[],['ns_1@127.0.0.1',undefined]}, {46,[],['ns_1@127.0.0.1',undefined]}, {47,[],['ns_1@127.0.0.1',undefined]}, {48,[],['ns_1@127.0.0.1',undefined]}, {49,[],['ns_1@127.0.0.1',undefined]}, {50,[],['ns_1@127.0.0.1',undefined]}, {51,[],['ns_1@127.0.0.1',undefined]}, {52,[],['ns_1@127.0.0.1',undefined]}, {53,[],['ns_1@127.0.0.1',undefined]}, {54,[],['ns_1@127.0.0.1',undefined]}, {55,[],['ns_1@127.0.0.1',undefined]}, {56,[],['ns_1@127.0.0.1',undefined]}, {57,[],['ns_1@127.0.0.1',undefined]}, {58,[],['ns_1@127.0.0.1',undefined]}, {59,[],['ns_1@127.0.0.1',undefined]}, {60,[],['ns_1@127.0.0.1',undefined]}, {61,[],['ns_1@127.0.0.1',undefined]}, {62,[],['ns_1@127.0.0.1',undefined]}, {63,[],['ns_1@127.0.0.1',undefined]}, {64,[],['ns_1@127.0.0.1',undefined]}, {65,[],['ns_1@127.0.0.1',undefined]}, {66,[],['ns_1@127.0.0.1',undefined]}, {67,[],['ns_1@127.0.0.1',undefined]}, {68,[],['ns_1@127.0.0.1',undefined]}, {69,[],['ns_1@127.0.0.1',undefined]}, {70,[],['ns_1@127.0.0.1',undefined]}, {71,[],['ns_1@127.0.0.1',undefined]}, 
{72,[],['ns_1@127.0.0.1',undefined]}, {73,[],['ns_1@127.0.0.1',undefined]}, {74,[],['ns_1@127.0.0.1',undefined]}, {75,[],['ns_1@127.0.0.1',undefined]}, {76,[],['ns_1@127.0.0.1',undefined]}, {77,[],['ns_1@127.0.0.1',undefined]}, {78,[],['ns_1@127.0.0.1',undefined]}, {79,[],['ns_1@127.0.0.1',undefined]}, {80,[],['ns_1@127.0.0.1',undefined]}, {81,[],['ns_1@127.0.0.1',undefined]}, {82,[],['ns_1@127.0.0.1',undefined]}, {83,[],['ns_1@127.0.0.1',undefined]}, {84,[],['ns_1@127.0.0.1',undefined]}, {85,[],['ns_1@127.0.0.1'|...]}, {86,[],[...]}, {87,[],...}, {88,...}, {...}|...]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@127.0.0.1']}]}]}] [ns_server:debug,2014-08-19T16:48:18.601,ns_1@127.0.0.1:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@127.0.0.1']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:48:18.606,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1023 state to active [ns_server:info,2014-08-19T16:48:18.607,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1022 state to active [ns_server:info,2014-08-19T16:48:18.607,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1021 state to active [ns_server:info,2014-08-19T16:48:18.608,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1020 state to active [ns_server:info,2014-08-19T16:48:18.608,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1019 state to active [ns_server:info,2014-08-19T16:48:18.608,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1018 state to active [ns_server:info,2014-08-19T16:48:18.609,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1017 state to active [ns_server:info,2014-08-19T16:48:18.609,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1016 state to active [ns_server:info,2014-08-19T16:48:18.609,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1015 state to active [ns_server:info,2014-08-19T16:48:18.610,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1014 state to active [ns_server:info,2014-08-19T16:48:18.610,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1013 state to active [ns_server:info,2014-08-19T16:48:18.610,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1012 state to active [ns_server:info,2014-08-19T16:48:18.611,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1011 state to active [ns_server:info,2014-08-19T16:48:18.611,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1010 state to active [ns_server:info,2014-08-19T16:48:18.611,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1009 state to active [ns_server:info,2014-08-19T16:48:18.612,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1008 state to active 
[ns_server:info,2014-08-19T16:48:18.612,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1007 state to active [ns_server:info,2014-08-19T16:48:18.612,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1006 state to active [ns_server:info,2014-08-19T16:48:18.612,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1005 state to active [ns_server:info,2014-08-19T16:48:18.613,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1004 state to active [ns_server:info,2014-08-19T16:48:18.613,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1003 state to active [ns_server:info,2014-08-19T16:48:18.613,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1002 state to active [ns_server:info,2014-08-19T16:48:18.614,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1001 state to active [ns_server:info,2014-08-19T16:48:18.614,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1000 state to active [ns_server:info,2014-08-19T16:48:18.614,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 999 state to active [ns_server:info,2014-08-19T16:48:18.614,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 998 state to active [ns_server:info,2014-08-19T16:48:18.615,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 997 state to active [ns_server:info,2014-08-19T16:48:18.615,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 996 state to active [ns_server:info,2014-08-19T16:48:18.615,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 995 state to active [ns_server:info,2014-08-19T16:48:18.615,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 994 state to active [ns_server:info,2014-08-19T16:48:18.615,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 993 state to active [ns_server:info,2014-08-19T16:48:18.616,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 992 state to active [ns_server:info,2014-08-19T16:48:18.616,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 991 state to active [ns_server:info,2014-08-19T16:48:18.616,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 990 state to active [ns_server:info,2014-08-19T16:48:18.616,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 989 state to active [ns_server:info,2014-08-19T16:48:18.617,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 988 state to active [ns_server:info,2014-08-19T16:48:18.617,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 987 state to active [ns_server:info,2014-08-19T16:48:18.617,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 986 state to active [ns_server:info,2014-08-19T16:48:18.617,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 985 state to active [ns_server:info,2014-08-19T16:48:18.618,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 984 state to active [ns_server:info,2014-08-19T16:48:18.618,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 983 state to active [ns_server:info,2014-08-19T16:48:18.618,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 982 state to active 
[ns_server:info,2014-08-19T16:48:18.618,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 981 state to active [ns_server:info,2014-08-19T16:48:18.619,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 980 state to active [ns_server:info,2014-08-19T16:48:18.619,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 979 state to active [ns_server:info,2014-08-19T16:48:18.619,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 978 state to active [ns_server:info,2014-08-19T16:48:18.619,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 977 state to active [ns_server:info,2014-08-19T16:48:18.619,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 976 state to active [ns_server:info,2014-08-19T16:48:18.620,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 975 state to active [ns_server:info,2014-08-19T16:48:18.620,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 974 state to active [ns_server:info,2014-08-19T16:48:18.620,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 973 state to active [ns_server:info,2014-08-19T16:48:18.620,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 972 state to active [ns_server:info,2014-08-19T16:48:18.621,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 971 state to active [ns_server:info,2014-08-19T16:48:18.621,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 970 state to active [ns_server:info,2014-08-19T16:48:18.621,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 969 state to active [ns_server:info,2014-08-19T16:48:18.621,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 968 state to active [ns_server:info,2014-08-19T16:48:18.622,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 967 state to active [ns_server:info,2014-08-19T16:48:18.622,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 966 state to active [ns_server:info,2014-08-19T16:48:18.622,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 965 state to active [ns_server:info,2014-08-19T16:48:18.622,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 964 state to active [ns_server:info,2014-08-19T16:48:18.623,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 963 state to active [ns_server:info,2014-08-19T16:48:18.623,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 962 state to active [ns_server:info,2014-08-19T16:48:18.623,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 961 state to active [ns_server:info,2014-08-19T16:48:18.623,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 960 state to active [ns_server:info,2014-08-19T16:48:18.623,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 959 state to active [ns_server:info,2014-08-19T16:48:18.624,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 958 state to active [ns_server:info,2014-08-19T16:48:18.624,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 957 state to active [ns_server:info,2014-08-19T16:48:18.624,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 956 state to active 
[ns_server:info,2014-08-19T16:48:18.624,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 955 state to active [ns_server:info,2014-08-19T16:48:18.624,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 954 state to active [ns_server:info,2014-08-19T16:48:18.625,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 953 state to active [ns_server:info,2014-08-19T16:48:18.625,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 952 state to active [ns_server:info,2014-08-19T16:48:18.625,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 951 state to active [ns_server:info,2014-08-19T16:48:18.625,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 950 state to active [ns_server:info,2014-08-19T16:48:18.626,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 949 state to active [ns_server:info,2014-08-19T16:48:18.626,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 948 state to active [ns_server:info,2014-08-19T16:48:18.626,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 947 state to active [ns_server:info,2014-08-19T16:48:18.626,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 946 state to active [ns_server:info,2014-08-19T16:48:18.626,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 945 state to active [ns_server:info,2014-08-19T16:48:18.627,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 944 state to active [ns_server:info,2014-08-19T16:48:18.627,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 943 state to active [ns_server:info,2014-08-19T16:48:18.627,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 942 state to active [ns_server:info,2014-08-19T16:48:18.627,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 941 state to active [ns_server:info,2014-08-19T16:48:18.628,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 940 state to active [ns_server:info,2014-08-19T16:48:18.628,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 939 state to active [ns_server:info,2014-08-19T16:48:18.628,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 938 state to active [ns_server:info,2014-08-19T16:48:18.628,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 937 state to active [ns_server:info,2014-08-19T16:48:18.628,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 936 state to active [ns_server:info,2014-08-19T16:48:18.629,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 935 state to active [ns_server:info,2014-08-19T16:48:18.629,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 934 state to active [ns_server:info,2014-08-19T16:48:18.629,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 933 state to active [ns_server:info,2014-08-19T16:48:18.629,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 932 state to active [ns_server:info,2014-08-19T16:48:18.630,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 931 state to active [ns_server:info,2014-08-19T16:48:18.630,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 930 state to active 
[ns_server:info,2014-08-19T16:48:18.630,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 929 state to active [ns_server:info,2014-08-19T16:48:18.630,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 928 state to active [ns_server:info,2014-08-19T16:48:18.630,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 927 state to active [ns_server:info,2014-08-19T16:48:18.631,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 926 state to active [ns_server:info,2014-08-19T16:48:18.631,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 925 state to active [ns_server:info,2014-08-19T16:48:18.631,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 924 state to active [ns_server:info,2014-08-19T16:48:18.631,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 923 state to active [ns_server:info,2014-08-19T16:48:18.631,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 922 state to active [ns_server:info,2014-08-19T16:48:18.632,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 921 state to active [ns_server:info,2014-08-19T16:48:18.632,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 920 state to active [ns_server:info,2014-08-19T16:48:18.632,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 919 state to active [ns_server:info,2014-08-19T16:48:18.632,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 918 state to active [ns_server:info,2014-08-19T16:48:18.633,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 917 state to active [ns_server:info,2014-08-19T16:48:18.633,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 916 state to active [ns_server:info,2014-08-19T16:48:18.633,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 915 state to active [ns_server:info,2014-08-19T16:48:18.633,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 914 state to active [ns_server:info,2014-08-19T16:48:18.633,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 913 state to active [ns_server:info,2014-08-19T16:48:18.634,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 912 state to active [ns_server:info,2014-08-19T16:48:18.634,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 911 state to active [ns_server:info,2014-08-19T16:48:18.634,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 910 state to active [ns_server:info,2014-08-19T16:48:18.634,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 909 state to active [ns_server:info,2014-08-19T16:48:18.634,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 908 state to active [ns_server:info,2014-08-19T16:48:18.635,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 907 state to active [ns_server:info,2014-08-19T16:48:18.635,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 906 state to active [ns_server:info,2014-08-19T16:48:18.635,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 905 state to active [ns_server:info,2014-08-19T16:48:18.635,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 904 state to active 
[ns_server:info,2014-08-19T16:48:18.635,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 903 state to active [ns_server:info,2014-08-19T16:48:18.636,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 902 state to active [ns_server:info,2014-08-19T16:48:18.636,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 901 state to active [ns_server:info,2014-08-19T16:48:18.636,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 900 state to active [ns_server:info,2014-08-19T16:48:18.637,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 899 state to active [ns_server:info,2014-08-19T16:48:18.637,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 898 state to active [ns_server:info,2014-08-19T16:48:18.637,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 897 state to active [ns_server:info,2014-08-19T16:48:18.637,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 896 state to active [ns_server:info,2014-08-19T16:48:18.637,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 895 state to active [ns_server:info,2014-08-19T16:48:18.638,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 894 state to active [ns_server:info,2014-08-19T16:48:18.638,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 893 state to active [ns_server:info,2014-08-19T16:48:18.638,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 892 state to active [ns_server:info,2014-08-19T16:48:18.638,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 891 state to active [ns_server:info,2014-08-19T16:48:18.638,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 890 state to active [ns_server:info,2014-08-19T16:48:18.639,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 889 state to active [ns_server:info,2014-08-19T16:48:18.639,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 888 state to active [ns_server:info,2014-08-19T16:48:18.639,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 887 state to active [ns_server:info,2014-08-19T16:48:18.639,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 886 state to active [ns_server:info,2014-08-19T16:48:18.640,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 885 state to active [ns_server:info,2014-08-19T16:48:18.640,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 884 state to active [ns_server:info,2014-08-19T16:48:18.640,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 883 state to active [ns_server:info,2014-08-19T16:48:18.640,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 882 state to active [ns_server:info,2014-08-19T16:48:18.640,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 881 state to active [ns_server:info,2014-08-19T16:48:18.641,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 880 state to active [ns_server:info,2014-08-19T16:48:18.641,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 879 state to active [ns_server:info,2014-08-19T16:48:18.641,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 878 state to active 
[ns_server:info,2014-08-19T16:48:18.642,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 877 state to active [ns_server:info,2014-08-19T16:48:18.642,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 876 state to active [ns_server:info,2014-08-19T16:48:18.642,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 875 state to active [ns_server:info,2014-08-19T16:48:18.642,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 874 state to active [ns_server:info,2014-08-19T16:48:18.643,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 873 state to active [ns_server:info,2014-08-19T16:48:18.643,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 872 state to active [ns_server:info,2014-08-19T16:48:18.644,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 871 state to active [ns_server:info,2014-08-19T16:48:18.644,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 870 state to active [ns_server:info,2014-08-19T16:48:18.644,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 869 state to active [ns_server:info,2014-08-19T16:48:18.645,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 868 state to active [ns_server:info,2014-08-19T16:48:18.645,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 867 state to active [ns_server:info,2014-08-19T16:48:18.645,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 866 state to active [ns_server:info,2014-08-19T16:48:18.645,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 865 state to active [ns_server:info,2014-08-19T16:48:18.645,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 864 state to active [ns_server:info,2014-08-19T16:48:18.646,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 863 state to active [ns_server:info,2014-08-19T16:48:18.646,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 862 state to active [ns_server:info,2014-08-19T16:48:18.646,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 861 state to active [ns_server:info,2014-08-19T16:48:18.646,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 860 state to active [ns_server:info,2014-08-19T16:48:18.647,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 859 state to active [ns_server:info,2014-08-19T16:48:18.647,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 858 state to active [ns_server:info,2014-08-19T16:48:18.647,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 857 state to active [ns_server:info,2014-08-19T16:48:18.647,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 856 state to active [ns_server:info,2014-08-19T16:48:18.648,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 855 state to active [ns_server:info,2014-08-19T16:48:18.648,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 854 state to active [ns_server:info,2014-08-19T16:48:18.648,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 853 state to active [ns_server:info,2014-08-19T16:48:18.648,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 852 state to active 
[ns_server:info,2014-08-19T16:48:18.648,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 851 state to active [ns_server:info,2014-08-19T16:48:18.649,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 850 state to active [ns_server:info,2014-08-19T16:48:18.649,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 849 state to active [ns_server:info,2014-08-19T16:48:18.649,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 848 state to active [ns_server:info,2014-08-19T16:48:18.649,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 847 state to active [ns_server:info,2014-08-19T16:48:18.650,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 846 state to active [ns_server:info,2014-08-19T16:48:18.650,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 845 state to active [ns_server:info,2014-08-19T16:48:18.650,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 844 state to active [ns_server:info,2014-08-19T16:48:18.650,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 843 state to active [ns_server:info,2014-08-19T16:48:18.651,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 842 state to active [ns_server:info,2014-08-19T16:48:18.651,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 841 state to active [ns_server:info,2014-08-19T16:48:18.651,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 840 state to active [ns_server:info,2014-08-19T16:48:18.651,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 839 state to active [ns_server:info,2014-08-19T16:48:18.652,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 838 state to active [ns_server:info,2014-08-19T16:48:18.652,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 837 state to active [ns_server:info,2014-08-19T16:48:18.652,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 836 state to active [ns_server:info,2014-08-19T16:48:18.652,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 835 state to active [ns_server:info,2014-08-19T16:48:18.653,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 834 state to active [ns_server:info,2014-08-19T16:48:18.653,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 833 state to active [ns_server:info,2014-08-19T16:48:18.653,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 832 state to active [ns_server:info,2014-08-19T16:48:18.653,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 831 state to active [ns_server:info,2014-08-19T16:48:18.653,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 830 state to active [ns_server:info,2014-08-19T16:48:18.654,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 829 state to active [ns_server:info,2014-08-19T16:48:18.654,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 828 state to active [ns_server:info,2014-08-19T16:48:18.654,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 827 state to active [ns_server:info,2014-08-19T16:48:18.654,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 826 state to active 
[ns_server:info,2014-08-19T16:48:18.655,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 825 state to active [ns_server:info,2014-08-19T16:48:18.655,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 824 state to active [ns_server:info,2014-08-19T16:48:18.655,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 823 state to active [ns_server:info,2014-08-19T16:48:18.655,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 822 state to active [ns_server:info,2014-08-19T16:48:18.656,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 821 state to active [ns_server:info,2014-08-19T16:48:18.656,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 820 state to active [ns_server:info,2014-08-19T16:48:18.656,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 819 state to active [ns_server:info,2014-08-19T16:48:18.656,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 818 state to active [ns_server:info,2014-08-19T16:48:18.656,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 817 state to active [ns_server:info,2014-08-19T16:48:18.657,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 816 state to active [ns_server:info,2014-08-19T16:48:18.657,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 815 state to active [ns_server:info,2014-08-19T16:48:18.657,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 814 state to active [ns_server:info,2014-08-19T16:48:18.657,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 813 state to active [ns_server:info,2014-08-19T16:48:18.658,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 812 state to active [ns_server:info,2014-08-19T16:48:18.658,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 811 state to active [ns_server:info,2014-08-19T16:48:18.658,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 810 state to active [ns_server:info,2014-08-19T16:48:18.658,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 809 state to active [ns_server:info,2014-08-19T16:48:18.658,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 808 state to active [ns_server:info,2014-08-19T16:48:18.659,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 807 state to active [ns_server:info,2014-08-19T16:48:18.659,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 806 state to active [ns_server:info,2014-08-19T16:48:18.659,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 805 state to active [ns_server:info,2014-08-19T16:48:18.659,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 804 state to active [ns_server:info,2014-08-19T16:48:18.660,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 803 state to active [ns_server:info,2014-08-19T16:48:18.660,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 802 state to active [ns_server:info,2014-08-19T16:48:18.660,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 801 state to active [ns_server:info,2014-08-19T16:48:18.660,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 800 state to active 
[ns_server:info,2014-08-19T16:48:18.660,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 799 state to active [ns_server:info,2014-08-19T16:48:18.661,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 798 state to active [ns_server:info,2014-08-19T16:48:18.661,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 797 state to active [ns_server:info,2014-08-19T16:48:18.661,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 796 state to active [ns_server:info,2014-08-19T16:48:18.661,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 795 state to active [ns_server:info,2014-08-19T16:48:18.662,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 794 state to active [ns_server:info,2014-08-19T16:48:18.662,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 793 state to active [ns_server:info,2014-08-19T16:48:18.662,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 792 state to active [ns_server:info,2014-08-19T16:48:18.662,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 791 state to active [ns_server:info,2014-08-19T16:48:18.662,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 790 state to active [ns_server:info,2014-08-19T16:48:18.663,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 789 state to active [ns_server:info,2014-08-19T16:48:18.663,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 788 state to active [ns_server:info,2014-08-19T16:48:18.663,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 787 state to active [ns_server:info,2014-08-19T16:48:18.663,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 786 state to active [ns_server:info,2014-08-19T16:48:18.664,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 785 state to active [ns_server:info,2014-08-19T16:48:18.664,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 784 state to active [ns_server:info,2014-08-19T16:48:18.664,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 783 state to active [ns_server:info,2014-08-19T16:48:18.664,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 782 state to active [ns_server:info,2014-08-19T16:48:18.664,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 781 state to active [ns_server:info,2014-08-19T16:48:18.665,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 780 state to active [ns_server:info,2014-08-19T16:48:18.665,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 779 state to active [ns_server:info,2014-08-19T16:48:18.665,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 778 state to active [ns_server:info,2014-08-19T16:48:18.665,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 777 state to active [ns_server:info,2014-08-19T16:48:18.666,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 776 state to active [ns_server:info,2014-08-19T16:48:18.666,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 775 state to active [ns_server:info,2014-08-19T16:48:18.666,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 774 state to active 
[ns_server:info,2014-08-19T16:48:18.666,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 773 state to active [ns_server:info,2014-08-19T16:48:18.667,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 772 state to active [ns_server:info,2014-08-19T16:48:18.667,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 771 state to active [ns_server:info,2014-08-19T16:48:18.667,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 770 state to active [ns_server:info,2014-08-19T16:48:18.667,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 769 state to active [ns_server:info,2014-08-19T16:48:18.667,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 768 state to active [ns_server:info,2014-08-19T16:48:18.668,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 767 state to active [ns_server:info,2014-08-19T16:48:18.668,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 766 state to active [ns_server:info,2014-08-19T16:48:18.668,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 765 state to active [ns_server:info,2014-08-19T16:48:18.669,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 764 state to active [ns_server:info,2014-08-19T16:48:18.669,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 763 state to active [ns_server:info,2014-08-19T16:48:18.669,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 762 state to active [ns_server:info,2014-08-19T16:48:18.669,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 761 state to active [ns_server:info,2014-08-19T16:48:18.670,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 760 state to active [ns_server:info,2014-08-19T16:48:18.670,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 759 state to active [ns_server:info,2014-08-19T16:48:18.671,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 758 state to active [ns_server:info,2014-08-19T16:48:18.672,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 757 state to active [ns_server:info,2014-08-19T16:48:18.672,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 756 state to active [ns_server:info,2014-08-19T16:48:18.672,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 755 state to active [ns_server:info,2014-08-19T16:48:18.673,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 754 state to active [ns_server:info,2014-08-19T16:48:18.673,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 753 state to active [ns_server:info,2014-08-19T16:48:18.673,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 752 state to active [ns_server:info,2014-08-19T16:48:18.674,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 751 state to active [ns_server:info,2014-08-19T16:48:18.674,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 750 state to active [ns_server:info,2014-08-19T16:48:18.674,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 749 state to active [ns_server:info,2014-08-19T16:48:18.674,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 748 state to active 
[ns_server:info,2014-08-19T16:48:18.675,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 747 state to active [ns_server:info,2014-08-19T16:48:18.675,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 746 state to active [ns_server:info,2014-08-19T16:48:18.675,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 745 state to active [ns_server:info,2014-08-19T16:48:18.675,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 744 state to active [ns_server:info,2014-08-19T16:48:18.676,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 743 state to active [ns_server:info,2014-08-19T16:48:18.676,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 742 state to active [ns_server:info,2014-08-19T16:48:18.676,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 741 state to active [ns_server:info,2014-08-19T16:48:18.676,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 740 state to active [ns_server:info,2014-08-19T16:48:18.676,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 739 state to active [ns_server:info,2014-08-19T16:48:18.677,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 738 state to active [ns_server:info,2014-08-19T16:48:18.677,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 737 state to active [ns_server:info,2014-08-19T16:48:18.677,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 736 state to active [ns_server:info,2014-08-19T16:48:18.677,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 735 state to active [ns_server:info,2014-08-19T16:48:18.677,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 734 state to active [ns_server:info,2014-08-19T16:48:18.678,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 733 state to active [ns_server:info,2014-08-19T16:48:18.678,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 732 state to active [ns_server:info,2014-08-19T16:48:18.678,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 731 state to active [ns_server:info,2014-08-19T16:48:18.678,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 730 state to active [ns_server:info,2014-08-19T16:48:18.678,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 729 state to active [ns_server:info,2014-08-19T16:48:18.679,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 728 state to active [ns_server:info,2014-08-19T16:48:18.679,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 727 state to active [ns_server:info,2014-08-19T16:48:18.679,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 726 state to active [ns_server:info,2014-08-19T16:48:18.679,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 725 state to active [ns_server:info,2014-08-19T16:48:18.679,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 724 state to active [ns_server:info,2014-08-19T16:48:18.680,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 723 state to active [ns_server:info,2014-08-19T16:48:18.680,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 722 state to active 
[ns_server:info,2014-08-19T16:48:18.680,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 721 state to active [ns_server:info,2014-08-19T16:48:18.680,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 720 state to active [ns_server:info,2014-08-19T16:48:18.681,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 719 state to active [ns_server:info,2014-08-19T16:48:18.681,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 718 state to active [ns_server:info,2014-08-19T16:48:18.681,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 717 state to active [ns_server:info,2014-08-19T16:48:18.681,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 716 state to active [ns_server:info,2014-08-19T16:48:18.681,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 715 state to active [ns_server:info,2014-08-19T16:48:18.682,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 714 state to active [ns_server:info,2014-08-19T16:48:18.682,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 713 state to active [ns_server:info,2014-08-19T16:48:18.682,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 712 state to active [ns_server:info,2014-08-19T16:48:18.682,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 711 state to active [ns_server:info,2014-08-19T16:48:18.683,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 710 state to active [ns_server:info,2014-08-19T16:48:18.683,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 709 state to active [ns_server:info,2014-08-19T16:48:18.683,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 708 state to active [ns_server:info,2014-08-19T16:48:18.683,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 707 state to active [ns_server:info,2014-08-19T16:48:18.683,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 706 state to active [ns_server:info,2014-08-19T16:48:18.684,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 705 state to active [ns_server:info,2014-08-19T16:48:18.684,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 704 state to active [ns_server:info,2014-08-19T16:48:18.684,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 703 state to active [ns_server:info,2014-08-19T16:48:18.684,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 702 state to active [ns_server:info,2014-08-19T16:48:18.684,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 701 state to active [ns_server:info,2014-08-19T16:48:18.685,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 700 state to active [ns_server:info,2014-08-19T16:48:18.685,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 699 state to active [ns_server:info,2014-08-19T16:48:18.685,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 698 state to active [ns_server:info,2014-08-19T16:48:18.685,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 697 state to active [ns_server:info,2014-08-19T16:48:18.686,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 696 state to active 
[ns_server:info,2014-08-19T16:48:18.686,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 695 state to active [ns_server:info,2014-08-19T16:48:18.686,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 694 state to active [ns_server:info,2014-08-19T16:48:18.686,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 693 state to active [ns_server:info,2014-08-19T16:48:18.687,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 692 state to active [ns_server:info,2014-08-19T16:48:18.687,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 691 state to active [ns_server:info,2014-08-19T16:48:18.687,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 690 state to active [ns_server:info,2014-08-19T16:48:18.687,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 689 state to active [ns_server:info,2014-08-19T16:48:18.687,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 688 state to active [ns_server:info,2014-08-19T16:48:18.688,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 687 state to active [ns_server:info,2014-08-19T16:48:18.688,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 686 state to active [ns_server:info,2014-08-19T16:48:18.688,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 685 state to active [ns_server:info,2014-08-19T16:48:18.688,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 684 state to active [ns_server:info,2014-08-19T16:48:18.688,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 683 state to active [ns_server:info,2014-08-19T16:48:18.689,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 682 state to active [ns_server:info,2014-08-19T16:48:18.689,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 681 state to active [ns_server:info,2014-08-19T16:48:18.689,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 680 state to active [ns_server:info,2014-08-19T16:48:18.689,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 679 state to active [ns_server:info,2014-08-19T16:48:18.689,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 678 state to active [ns_server:info,2014-08-19T16:48:18.690,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 677 state to active [ns_server:info,2014-08-19T16:48:18.690,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 676 state to active [ns_server:info,2014-08-19T16:48:18.690,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 675 state to active [ns_server:info,2014-08-19T16:48:18.690,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 674 state to active [ns_server:info,2014-08-19T16:48:18.690,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 673 state to active [ns_server:info,2014-08-19T16:48:18.691,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 672 state to active [ns_server:info,2014-08-19T16:48:18.691,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 671 state to active [ns_server:info,2014-08-19T16:48:18.691,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 670 state to active 
[ns_server:info,2014-08-19T16:48:18.691,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 669 state to active [ns_server:info,2014-08-19T16:48:18.691,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 668 state to active [ns_server:info,2014-08-19T16:48:18.692,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 667 state to active [ns_server:info,2014-08-19T16:48:18.692,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 666 state to active [ns_server:info,2014-08-19T16:48:18.692,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 665 state to active [ns_server:info,2014-08-19T16:48:18.692,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 664 state to active [ns_server:info,2014-08-19T16:48:18.692,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 663 state to active [ns_server:info,2014-08-19T16:48:18.693,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 662 state to active [ns_server:info,2014-08-19T16:48:18.693,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 661 state to active [ns_server:info,2014-08-19T16:48:18.693,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 660 state to active [ns_server:info,2014-08-19T16:48:18.693,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 659 state to active [ns_server:info,2014-08-19T16:48:18.694,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 658 state to active [ns_server:info,2014-08-19T16:48:18.694,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 657 state to active [ns_server:info,2014-08-19T16:48:18.694,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 656 state to active [ns_server:info,2014-08-19T16:48:18.694,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 655 state to active [ns_server:info,2014-08-19T16:48:18.695,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 654 state to active [ns_server:info,2014-08-19T16:48:18.695,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 653 state to active [ns_server:info,2014-08-19T16:48:18.695,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 652 state to active [ns_server:info,2014-08-19T16:48:18.695,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 651 state to active [ns_server:info,2014-08-19T16:48:18.695,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 650 state to active [ns_server:info,2014-08-19T16:48:18.696,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 649 state to active [ns_server:info,2014-08-19T16:48:18.696,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 648 state to active [ns_server:info,2014-08-19T16:48:18.696,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 647 state to active [ns_server:info,2014-08-19T16:48:18.696,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 646 state to active [ns_server:info,2014-08-19T16:48:18.697,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 645 state to active [ns_server:info,2014-08-19T16:48:18.697,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 644 state to active 
[ns_server:info,2014-08-19T16:48:18.697,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 643 state to active [ns_server:info,2014-08-19T16:48:18.697,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 642 state to active [ns_server:info,2014-08-19T16:48:18.697,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 641 state to active [ns_server:info,2014-08-19T16:48:18.697,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 640 state to active [ns_server:info,2014-08-19T16:48:18.698,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 639 state to active [ns_server:info,2014-08-19T16:48:18.698,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 638 state to active [ns_server:info,2014-08-19T16:48:18.698,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 637 state to active [ns_server:info,2014-08-19T16:48:18.698,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 636 state to active [ns_server:info,2014-08-19T16:48:18.699,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 635 state to active [ns_server:info,2014-08-19T16:48:18.699,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 634 state to active [ns_server:info,2014-08-19T16:48:18.699,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 633 state to active [ns_server:info,2014-08-19T16:48:18.699,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 632 state to active [ns_server:info,2014-08-19T16:48:18.699,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 631 state to active [ns_server:info,2014-08-19T16:48:18.700,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 630 state to active [ns_server:info,2014-08-19T16:48:18.700,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 629 state to active [ns_server:info,2014-08-19T16:48:18.700,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 628 state to active [ns_server:info,2014-08-19T16:48:18.700,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 627 state to active [ns_server:info,2014-08-19T16:48:18.700,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 626 state to active [ns_server:info,2014-08-19T16:48:18.701,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 625 state to active [ns_server:info,2014-08-19T16:48:18.701,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 624 state to active [ns_server:info,2014-08-19T16:48:18.701,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 623 state to active [ns_server:info,2014-08-19T16:48:18.701,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 622 state to active [ns_server:info,2014-08-19T16:48:18.701,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 621 state to active [ns_server:info,2014-08-19T16:48:18.702,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 620 state to active [ns_server:info,2014-08-19T16:48:18.702,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 619 state to active [ns_server:info,2014-08-19T16:48:18.702,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 618 state to active 
[ns_server:info,2014-08-19T16:48:18.702,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 617 state to active [ns_server:info,2014-08-19T16:48:18.702,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 616 state to active [ns_server:info,2014-08-19T16:48:18.703,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 615 state to active [ns_server:info,2014-08-19T16:48:18.703,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 614 state to active [ns_server:info,2014-08-19T16:48:18.703,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 613 state to active [ns_server:info,2014-08-19T16:48:18.703,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 612 state to active [ns_server:info,2014-08-19T16:48:18.703,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 611 state to active [ns_server:info,2014-08-19T16:48:18.704,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 610 state to active [ns_server:info,2014-08-19T16:48:18.704,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 609 state to active [ns_server:info,2014-08-19T16:48:18.704,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 608 state to active [ns_server:info,2014-08-19T16:48:18.704,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 607 state to active [ns_server:info,2014-08-19T16:48:18.705,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 606 state to active [ns_server:info,2014-08-19T16:48:18.705,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 605 state to active [ns_server:info,2014-08-19T16:48:18.705,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 604 state to active [ns_server:info,2014-08-19T16:48:18.705,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 603 state to active [ns_server:info,2014-08-19T16:48:18.706,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 602 state to active [ns_server:info,2014-08-19T16:48:18.706,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 601 state to active [ns_server:info,2014-08-19T16:48:18.706,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 600 state to active [ns_server:info,2014-08-19T16:48:18.706,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 599 state to active [ns_server:info,2014-08-19T16:48:18.706,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 598 state to active [ns_server:info,2014-08-19T16:48:18.707,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 597 state to active [ns_server:info,2014-08-19T16:48:18.707,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 596 state to active [ns_server:info,2014-08-19T16:48:18.707,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 595 state to active [ns_server:info,2014-08-19T16:48:18.707,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 594 state to active [ns_server:info,2014-08-19T16:48:18.710,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 593 state to active [ns_server:info,2014-08-19T16:48:18.710,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 592 state to active 
[ns_server:info,2014-08-19T16:48:18.711,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 591 state to active [ns_server:info,2014-08-19T16:48:18.711,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 590 state to active [ns_server:info,2014-08-19T16:48:18.711,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 589 state to active [ns_server:info,2014-08-19T16:48:18.711,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 588 state to active [ns_server:info,2014-08-19T16:48:18.711,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 587 state to active [ns_server:info,2014-08-19T16:48:18.712,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 586 state to active [ns_server:info,2014-08-19T16:48:18.712,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 585 state to active [ns_server:info,2014-08-19T16:48:18.712,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 584 state to active [ns_server:info,2014-08-19T16:48:18.712,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 583 state to active [ns_server:info,2014-08-19T16:48:18.713,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 582 state to active [ns_server:info,2014-08-19T16:48:18.713,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 581 state to active [ns_server:info,2014-08-19T16:48:18.713,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 580 state to active [ns_server:info,2014-08-19T16:48:18.713,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 579 state to active [ns_server:info,2014-08-19T16:48:18.713,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 578 state to active [ns_server:info,2014-08-19T16:48:18.714,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 577 state to active [ns_server:info,2014-08-19T16:48:18.714,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 576 state to active [ns_server:info,2014-08-19T16:48:18.714,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 575 state to active [ns_server:info,2014-08-19T16:48:18.714,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 574 state to active [ns_server:info,2014-08-19T16:48:18.714,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 573 state to active [ns_server:info,2014-08-19T16:48:18.715,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 572 state to active [ns_server:info,2014-08-19T16:48:18.715,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 571 state to active [ns_server:info,2014-08-19T16:48:18.715,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 570 state to active [ns_server:info,2014-08-19T16:48:18.715,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 569 state to active [ns_server:info,2014-08-19T16:48:18.716,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 568 state to active [ns_server:info,2014-08-19T16:48:18.716,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 567 state to active [ns_server:info,2014-08-19T16:48:18.716,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 566 state to active 
[ns_server:info,2014-08-19T16:48:18.716,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 565 state to active [ns_server:info,2014-08-19T16:48:18.716,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 564 state to active [ns_server:info,2014-08-19T16:48:18.717,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 563 state to active [ns_server:info,2014-08-19T16:48:18.717,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 562 state to active [ns_server:info,2014-08-19T16:48:18.717,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 561 state to active [ns_server:info,2014-08-19T16:48:18.717,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 560 state to active [ns_server:info,2014-08-19T16:48:18.717,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 559 state to active [ns_server:info,2014-08-19T16:48:18.718,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 558 state to active [ns_server:info,2014-08-19T16:48:18.718,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 557 state to active [ns_server:info,2014-08-19T16:48:18.718,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 556 state to active [ns_server:info,2014-08-19T16:48:18.718,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 555 state to active [ns_server:info,2014-08-19T16:48:18.718,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 554 state to active [ns_server:info,2014-08-19T16:48:18.719,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 553 state to active [ns_server:info,2014-08-19T16:48:18.719,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 552 state to active [ns_server:info,2014-08-19T16:48:18.719,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 551 state to active [ns_server:info,2014-08-19T16:48:18.719,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 550 state to active [ns_server:info,2014-08-19T16:48:18.720,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 549 state to active [ns_server:info,2014-08-19T16:48:18.720,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 548 state to active [ns_server:info,2014-08-19T16:48:18.720,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 547 state to active [ns_server:info,2014-08-19T16:48:18.720,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 546 state to active [ns_server:info,2014-08-19T16:48:18.720,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 545 state to active [ns_server:info,2014-08-19T16:48:18.721,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 544 state to active [ns_server:info,2014-08-19T16:48:18.721,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 543 state to active [ns_server:info,2014-08-19T16:48:18.721,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 542 state to active [ns_server:info,2014-08-19T16:48:18.721,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 541 state to active [ns_server:info,2014-08-19T16:48:18.721,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 540 state to active 
[ns_server:info,2014-08-19T16:48:18.722,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 539 state to active [ns_server:info,2014-08-19T16:48:18.722,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 538 state to active [ns_server:info,2014-08-19T16:48:18.722,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 537 state to active [ns_server:info,2014-08-19T16:48:18.722,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 536 state to active [ns_server:info,2014-08-19T16:48:18.723,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 535 state to active [ns_server:info,2014-08-19T16:48:18.723,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 534 state to active [ns_server:info,2014-08-19T16:48:18.723,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 533 state to active [ns_server:info,2014-08-19T16:48:18.723,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 532 state to active [ns_server:info,2014-08-19T16:48:18.724,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 531 state to active [ns_server:info,2014-08-19T16:48:18.724,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 530 state to active [ns_server:info,2014-08-19T16:48:18.724,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 529 state to active [ns_server:info,2014-08-19T16:48:18.724,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 528 state to active [ns_server:info,2014-08-19T16:48:18.724,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 527 state to active [ns_server:info,2014-08-19T16:48:18.725,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 526 state to active [ns_server:info,2014-08-19T16:48:18.725,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 525 state to active [ns_server:info,2014-08-19T16:48:18.725,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 524 state to active [ns_server:info,2014-08-19T16:48:18.725,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 523 state to active [ns_server:info,2014-08-19T16:48:18.725,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 522 state to active [ns_server:info,2014-08-19T16:48:18.726,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 521 state to active [ns_server:info,2014-08-19T16:48:18.726,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 520 state to active [ns_server:info,2014-08-19T16:48:18.726,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 519 state to active [ns_server:info,2014-08-19T16:48:18.726,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 518 state to active [ns_server:info,2014-08-19T16:48:18.727,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 517 state to active [ns_server:info,2014-08-19T16:48:18.727,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 516 state to active [ns_server:info,2014-08-19T16:48:18.727,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 515 state to active [ns_server:info,2014-08-19T16:48:18.727,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 514 state to active 
[ns_server:info,2014-08-19T16:48:18.727,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 513 state to active [ns_server:info,2014-08-19T16:48:18.728,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 512 state to active [ns_server:info,2014-08-19T16:48:18.728,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 511 state to active [ns_server:info,2014-08-19T16:48:18.728,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 510 state to active [ns_server:info,2014-08-19T16:48:18.728,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 509 state to active [ns_server:info,2014-08-19T16:48:18.728,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 508 state to active [ns_server:info,2014-08-19T16:48:18.729,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 507 state to active [ns_server:info,2014-08-19T16:48:18.729,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 506 state to active [ns_server:info,2014-08-19T16:48:18.729,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 505 state to active [ns_server:info,2014-08-19T16:48:18.729,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 504 state to active [ns_server:debug,2014-08-19T16:48:18.730,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1023. Nacking mccouch update. [views:debug,2014-08-19T16:48:18.730,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1023. Updated state: active (0) [ns_server:info,2014-08-19T16:48:18.730,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 503 state to active [ns_server:debug,2014-08-19T16:48:18.730,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1023,active,0} [ns_server:debug,2014-08-19T16:48:18.730,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1023] [ns_server:info,2014-08-19T16:48:18.730,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 502 state to active [ns_server:info,2014-08-19T16:48:18.730,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 501 state to active [ns_server:info,2014-08-19T16:48:18.731,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 500 state to active [ns_server:info,2014-08-19T16:48:18.731,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 499 state to active [ns_server:info,2014-08-19T16:48:18.731,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 498 state to active [ns_server:info,2014-08-19T16:48:18.731,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 497 state to active [ns_server:info,2014-08-19T16:48:18.731,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 496 state to active [ns_server:info,2014-08-19T16:48:18.732,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 495 state to active [ns_server:info,2014-08-19T16:48:18.732,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 494 state to active [ns_server:info,2014-08-19T16:48:18.732,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 493 state to active 
[ns_server:info,2014-08-19T16:48:18.732,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 492 state to active [ns_server:info,2014-08-19T16:48:18.732,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 491 state to active [ns_server:info,2014-08-19T16:48:18.733,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 490 state to active [ns_server:info,2014-08-19T16:48:18.733,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 489 state to active [ns_server:info,2014-08-19T16:48:18.733,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 488 state to active [ns_server:info,2014-08-19T16:48:18.733,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 487 state to active [ns_server:info,2014-08-19T16:48:18.734,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 486 state to active [ns_server:info,2014-08-19T16:48:18.734,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 485 state to active [ns_server:info,2014-08-19T16:48:18.734,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 484 state to active [ns_server:info,2014-08-19T16:48:18.734,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 483 state to active [ns_server:info,2014-08-19T16:48:18.734,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 482 state to active [ns_server:info,2014-08-19T16:48:18.735,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 481 state to active [ns_server:info,2014-08-19T16:48:18.735,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 480 state to active [ns_server:info,2014-08-19T16:48:18.735,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 479 state to active [ns_server:info,2014-08-19T16:48:18.735,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 478 state to active [ns_server:info,2014-08-19T16:48:18.736,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 477 state to active [ns_server:info,2014-08-19T16:48:18.736,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 476 state to active [ns_server:info,2014-08-19T16:48:18.736,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 475 state to active [ns_server:info,2014-08-19T16:48:18.736,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 474 state to active [ns_server:info,2014-08-19T16:48:18.736,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 473 state to active [ns_server:info,2014-08-19T16:48:18.737,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 472 state to active [ns_server:info,2014-08-19T16:48:18.737,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 471 state to active [ns_server:info,2014-08-19T16:48:18.737,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 470 state to active [ns_server:info,2014-08-19T16:48:18.737,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 469 state to active [ns_server:info,2014-08-19T16:48:18.738,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 468 state to active [ns_server:info,2014-08-19T16:48:18.738,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 467 state to active 
[ns_server:info,2014-08-19T16:48:18.738,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 466 state to active [ns_server:info,2014-08-19T16:48:18.738,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 465 state to active [ns_server:info,2014-08-19T16:48:18.738,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 464 state to active [ns_server:info,2014-08-19T16:48:18.739,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 463 state to active [ns_server:info,2014-08-19T16:48:18.739,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 462 state to active [ns_server:info,2014-08-19T16:48:18.739,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 461 state to active [ns_server:info,2014-08-19T16:48:18.739,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 460 state to active [ns_server:info,2014-08-19T16:48:18.739,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 459 state to active [ns_server:info,2014-08-19T16:48:18.740,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 458 state to active [ns_server:info,2014-08-19T16:48:18.740,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 457 state to active [ns_server:info,2014-08-19T16:48:18.740,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 456 state to active [ns_server:info,2014-08-19T16:48:18.740,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 455 state to active [ns_server:info,2014-08-19T16:48:18.740,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 454 state to active [ns_server:info,2014-08-19T16:48:18.741,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 453 state to active [ns_server:info,2014-08-19T16:48:18.741,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 452 state to active [ns_server:info,2014-08-19T16:48:18.741,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 451 state to active [ns_server:info,2014-08-19T16:48:18.741,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 450 state to active [ns_server:info,2014-08-19T16:48:18.741,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 449 state to active [ns_server:info,2014-08-19T16:48:18.742,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 448 state to active [ns_server:info,2014-08-19T16:48:18.742,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 447 state to active [ns_server:info,2014-08-19T16:48:18.742,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 446 state to active [ns_server:info,2014-08-19T16:48:18.742,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 445 state to active [ns_server:info,2014-08-19T16:48:18.742,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 444 state to active [ns_server:info,2014-08-19T16:48:18.743,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 443 state to active [ns_server:info,2014-08-19T16:48:18.743,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 442 state to active [ns_server:info,2014-08-19T16:48:18.743,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 441 state to active 
[ns_server:info,2014-08-19T16:48:18.743,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 440 state to active [ns_server:info,2014-08-19T16:48:18.743,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 439 state to active [ns_server:info,2014-08-19T16:48:18.744,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 438 state to active [ns_server:info,2014-08-19T16:48:18.744,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 437 state to active [ns_server:info,2014-08-19T16:48:18.744,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 436 state to active [ns_server:info,2014-08-19T16:48:18.744,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 435 state to active [ns_server:info,2014-08-19T16:48:18.744,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 434 state to active [ns_server:info,2014-08-19T16:48:18.745,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 433 state to active [ns_server:info,2014-08-19T16:48:18.745,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 432 state to active [ns_server:info,2014-08-19T16:48:18.745,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 431 state to active [ns_server:info,2014-08-19T16:48:18.745,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 430 state to active [ns_server:info,2014-08-19T16:48:18.745,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 429 state to active [ns_server:info,2014-08-19T16:48:18.746,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 428 state to active [ns_server:info,2014-08-19T16:48:18.746,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 427 state to active [ns_server:info,2014-08-19T16:48:18.746,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 426 state to active [ns_server:info,2014-08-19T16:48:18.746,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 425 state to active [ns_server:info,2014-08-19T16:48:18.746,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 424 state to active [ns_server:info,2014-08-19T16:48:18.747,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 423 state to active [ns_server:info,2014-08-19T16:48:18.747,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 422 state to active [ns_server:info,2014-08-19T16:48:18.747,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 421 state to active [ns_server:info,2014-08-19T16:48:18.747,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 420 state to active [ns_server:info,2014-08-19T16:48:18.748,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 419 state to active [ns_server:info,2014-08-19T16:48:18.748,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 418 state to active [ns_server:info,2014-08-19T16:48:18.748,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 417 state to active [ns_server:info,2014-08-19T16:48:18.748,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 416 state to active [ns_server:info,2014-08-19T16:48:18.748,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 415 state to active 
[ns_server:info,2014-08-19T16:48:18.749,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 414 state to active [ns_server:info,2014-08-19T16:48:18.749,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 413 state to active [ns_server:info,2014-08-19T16:48:18.749,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 412 state to active [ns_server:info,2014-08-19T16:48:18.749,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 411 state to active [ns_server:info,2014-08-19T16:48:18.750,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 410 state to active [ns_server:info,2014-08-19T16:48:18.750,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 409 state to active [ns_server:info,2014-08-19T16:48:18.750,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 408 state to active [ns_server:info,2014-08-19T16:48:18.750,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 407 state to active [ns_server:info,2014-08-19T16:48:18.750,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 406 state to active [ns_server:info,2014-08-19T16:48:18.751,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 405 state to active [ns_server:info,2014-08-19T16:48:18.751,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 404 state to active [ns_server:info,2014-08-19T16:48:18.751,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 403 state to active [ns_server:info,2014-08-19T16:48:18.751,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 402 state to active [ns_server:info,2014-08-19T16:48:18.751,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 401 state to active [ns_server:info,2014-08-19T16:48:18.751,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 400 state to active [ns_server:info,2014-08-19T16:48:18.752,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 399 state to active [ns_server:info,2014-08-19T16:48:18.752,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 398 state to active [ns_server:info,2014-08-19T16:48:18.752,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 397 state to active [ns_server:info,2014-08-19T16:48:18.752,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 396 state to active [ns_server:info,2014-08-19T16:48:18.752,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 395 state to active [ns_server:info,2014-08-19T16:48:18.753,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 394 state to active [ns_server:info,2014-08-19T16:48:18.753,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 393 state to active [ns_server:info,2014-08-19T16:48:18.753,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 392 state to active [ns_server:info,2014-08-19T16:48:18.753,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 391 state to active [ns_server:info,2014-08-19T16:48:18.753,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 390 state to active [ns_server:info,2014-08-19T16:48:18.754,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 389 state to active 
[ns_server:info,2014-08-19T16:48:18.754,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 388 state to active [ns_server:info,2014-08-19T16:48:18.754,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 387 state to active [ns_server:info,2014-08-19T16:48:18.754,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 386 state to active [ns_server:info,2014-08-19T16:48:18.755,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 385 state to active [ns_server:info,2014-08-19T16:48:18.755,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 384 state to active [ns_server:info,2014-08-19T16:48:18.755,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 383 state to active [ns_server:info,2014-08-19T16:48:18.755,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 382 state to active [ns_server:info,2014-08-19T16:48:18.755,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 381 state to active [ns_server:info,2014-08-19T16:48:18.756,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 380 state to active [ns_server:info,2014-08-19T16:48:18.756,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 379 state to active [ns_server:info,2014-08-19T16:48:18.756,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 378 state to active [ns_server:info,2014-08-19T16:48:18.756,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 377 state to active [ns_server:info,2014-08-19T16:48:18.756,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 376 state to active [ns_server:info,2014-08-19T16:48:18.757,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 375 state to active [ns_server:info,2014-08-19T16:48:18.757,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 374 state to active [ns_server:info,2014-08-19T16:48:18.757,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 373 state to active [ns_server:info,2014-08-19T16:48:18.757,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 372 state to active [ns_server:info,2014-08-19T16:48:18.757,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 371 state to active [ns_server:info,2014-08-19T16:48:18.758,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 370 state to active [ns_server:info,2014-08-19T16:48:18.758,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 369 state to active [ns_server:info,2014-08-19T16:48:18.758,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 368 state to active [ns_server:info,2014-08-19T16:48:18.758,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 367 state to active [ns_server:info,2014-08-19T16:48:18.758,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 366 state to active [ns_server:info,2014-08-19T16:48:18.759,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 365 state to active [ns_server:info,2014-08-19T16:48:18.759,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 364 state to active [ns_server:info,2014-08-19T16:48:18.759,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 363 state to active 
[ns_server:info,2014-08-19T16:48:18.759,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 362 state to active [ns_server:info,2014-08-19T16:48:18.759,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 361 state to active [ns_server:info,2014-08-19T16:48:18.760,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 360 state to active [ns_server:info,2014-08-19T16:48:18.760,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 359 state to active [ns_server:info,2014-08-19T16:48:18.760,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 358 state to active [ns_server:info,2014-08-19T16:48:18.760,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 357 state to active [ns_server:info,2014-08-19T16:48:18.760,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 356 state to active [ns_server:info,2014-08-19T16:48:18.761,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 355 state to active [ns_server:info,2014-08-19T16:48:18.761,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 354 state to active [ns_server:info,2014-08-19T16:48:18.761,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 353 state to active [ns_server:info,2014-08-19T16:48:18.761,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 352 state to active [ns_server:info,2014-08-19T16:48:18.762,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 351 state to active [ns_server:info,2014-08-19T16:48:18.762,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 350 state to active [ns_server:info,2014-08-19T16:48:18.762,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 349 state to active [ns_server:info,2014-08-19T16:48:18.762,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 348 state to active [ns_server:info,2014-08-19T16:48:18.762,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 347 state to active [ns_server:info,2014-08-19T16:48:18.763,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 346 state to active [ns_server:info,2014-08-19T16:48:18.763,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 345 state to active [ns_server:info,2014-08-19T16:48:18.763,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 344 state to active [ns_server:info,2014-08-19T16:48:18.763,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 343 state to active [ns_server:info,2014-08-19T16:48:18.763,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 342 state to active [ns_server:info,2014-08-19T16:48:18.764,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 341 state to active [ns_server:info,2014-08-19T16:48:18.764,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 340 state to active [ns_server:info,2014-08-19T16:48:18.764,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 339 state to active [ns_server:info,2014-08-19T16:48:18.764,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 338 state to active [ns_server:info,2014-08-19T16:48:18.764,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 337 state to active 
[ns_server:info,2014-08-19T16:48:18.765,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 336 state to active [ns_server:info,2014-08-19T16:48:18.765,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 335 state to active [ns_server:info,2014-08-19T16:48:18.765,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 334 state to active [ns_server:info,2014-08-19T16:48:18.765,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 333 state to active [ns_server:info,2014-08-19T16:48:18.765,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 332 state to active [ns_server:info,2014-08-19T16:48:18.766,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 331 state to active [ns_server:info,2014-08-19T16:48:18.766,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 330 state to active [ns_server:info,2014-08-19T16:48:18.766,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 329 state to active [ns_server:info,2014-08-19T16:48:18.766,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 328 state to active [ns_server:info,2014-08-19T16:48:18.766,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 327 state to active [ns_server:info,2014-08-19T16:48:18.767,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 326 state to active [ns_server:info,2014-08-19T16:48:18.767,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 325 state to active [ns_server:info,2014-08-19T16:48:18.767,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 324 state to active [ns_server:info,2014-08-19T16:48:18.767,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 323 state to active [ns_server:info,2014-08-19T16:48:18.767,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 322 state to active [ns_server:info,2014-08-19T16:48:18.768,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 321 state to active [ns_server:info,2014-08-19T16:48:18.768,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 320 state to active [ns_server:info,2014-08-19T16:48:18.768,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 319 state to active [ns_server:info,2014-08-19T16:48:18.768,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 318 state to active [ns_server:info,2014-08-19T16:48:18.768,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 317 state to active [ns_server:info,2014-08-19T16:48:18.769,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 316 state to active [ns_server:info,2014-08-19T16:48:18.769,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 315 state to active [ns_server:info,2014-08-19T16:48:18.769,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 314 state to active [ns_server:info,2014-08-19T16:48:18.769,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 313 state to active [ns_server:info,2014-08-19T16:48:18.769,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 312 state to active [ns_server:info,2014-08-19T16:48:18.770,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 311 state to active 
[ns_server:info,2014-08-19T16:48:18.770,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 310 state to active [ns_server:info,2014-08-19T16:48:18.770,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 309 state to active [ns_server:info,2014-08-19T16:48:18.770,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 308 state to active [ns_server:info,2014-08-19T16:48:18.770,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 307 state to active [ns_server:info,2014-08-19T16:48:18.771,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 306 state to active [ns_server:info,2014-08-19T16:48:18.771,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 305 state to active [ns_server:info,2014-08-19T16:48:18.771,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 304 state to active [ns_server:info,2014-08-19T16:48:18.771,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 303 state to active [ns_server:info,2014-08-19T16:48:18.771,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 302 state to active [ns_server:info,2014-08-19T16:48:18.772,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 301 state to active [ns_server:info,2014-08-19T16:48:18.772,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 300 state to active [ns_server:info,2014-08-19T16:48:18.772,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 299 state to active [ns_server:info,2014-08-19T16:48:18.772,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 298 state to active [ns_server:info,2014-08-19T16:48:18.773,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 297 state to active [ns_server:info,2014-08-19T16:48:18.773,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 296 state to active [ns_server:info,2014-08-19T16:48:18.773,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 295 state to active [ns_server:info,2014-08-19T16:48:18.773,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 294 state to active [ns_server:info,2014-08-19T16:48:18.773,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 293 state to active [ns_server:info,2014-08-19T16:48:18.774,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 292 state to active [ns_server:info,2014-08-19T16:48:18.774,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 291 state to active [ns_server:info,2014-08-19T16:48:18.774,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 290 state to active [ns_server:info,2014-08-19T16:48:18.774,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 289 state to active [ns_server:info,2014-08-19T16:48:18.774,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 288 state to active [ns_server:info,2014-08-19T16:48:18.775,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 287 state to active [ns_server:info,2014-08-19T16:48:18.775,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 286 state to active [ns_server:info,2014-08-19T16:48:18.775,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 285 state to active 
[ns_server:info,2014-08-19T16:48:18.775,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 284 state to active [ns_server:info,2014-08-19T16:48:18.776,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 283 state to active [ns_server:info,2014-08-19T16:48:18.776,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 282 state to active [ns_server:info,2014-08-19T16:48:18.776,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 281 state to active [ns_server:info,2014-08-19T16:48:18.776,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 280 state to active [ns_server:info,2014-08-19T16:48:18.776,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 279 state to active [ns_server:info,2014-08-19T16:48:18.777,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 278 state to active [ns_server:info,2014-08-19T16:48:18.777,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 277 state to active [ns_server:info,2014-08-19T16:48:18.777,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 276 state to active [ns_server:info,2014-08-19T16:48:18.777,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 275 state to active [ns_server:info,2014-08-19T16:48:18.777,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 274 state to active [ns_server:info,2014-08-19T16:48:18.778,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 273 state to active [ns_server:info,2014-08-19T16:48:18.778,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 272 state to active [ns_server:info,2014-08-19T16:48:18.778,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 271 state to active [ns_server:info,2014-08-19T16:48:18.778,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 270 state to active [ns_server:info,2014-08-19T16:48:18.778,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 269 state to active [ns_server:info,2014-08-19T16:48:18.779,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 268 state to active [ns_server:info,2014-08-19T16:48:18.779,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 267 state to active [ns_server:info,2014-08-19T16:48:18.779,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 266 state to active [ns_server:info,2014-08-19T16:48:18.779,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 265 state to active [ns_server:info,2014-08-19T16:48:18.779,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 264 state to active [ns_server:info,2014-08-19T16:48:18.780,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 263 state to active [ns_server:info,2014-08-19T16:48:18.780,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 262 state to active [ns_server:info,2014-08-19T16:48:18.780,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 261 state to active [ns_server:info,2014-08-19T16:48:18.780,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 260 state to active [ns_server:info,2014-08-19T16:48:18.780,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 259 state to active 
[ns_server:info,2014-08-19T16:48:18.781,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 258 state to active [ns_server:info,2014-08-19T16:48:18.781,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 257 state to active [ns_server:info,2014-08-19T16:48:18.781,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 256 state to active [ns_server:info,2014-08-19T16:48:18.781,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 255 state to active [ns_server:info,2014-08-19T16:48:18.781,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 254 state to active [ns_server:info,2014-08-19T16:48:18.782,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 253 state to active [ns_server:info,2014-08-19T16:48:18.782,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 252 state to active [ns_server:info,2014-08-19T16:48:18.782,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 251 state to active [ns_server:info,2014-08-19T16:48:18.782,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 250 state to active [ns_server:info,2014-08-19T16:48:18.782,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 249 state to active [ns_server:info,2014-08-19T16:48:18.783,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 248 state to active [ns_server:info,2014-08-19T16:48:18.783,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 247 state to active [ns_server:info,2014-08-19T16:48:18.783,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 246 state to active [ns_server:info,2014-08-19T16:48:18.783,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 245 state to active [ns_server:info,2014-08-19T16:48:18.783,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 244 state to active [ns_server:info,2014-08-19T16:48:18.784,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 243 state to active [ns_server:info,2014-08-19T16:48:18.784,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 242 state to active [ns_server:info,2014-08-19T16:48:18.784,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 241 state to active [ns_server:info,2014-08-19T16:48:18.784,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 240 state to active [ns_server:info,2014-08-19T16:48:18.784,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 239 state to active [ns_server:info,2014-08-19T16:48:18.785,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 238 state to active [ns_server:info,2014-08-19T16:48:18.785,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 237 state to active [ns_server:info,2014-08-19T16:48:18.785,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 236 state to active [ns_server:info,2014-08-19T16:48:18.785,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 235 state to active [ns_server:info,2014-08-19T16:48:18.785,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 234 state to active [ns_server:info,2014-08-19T16:48:18.786,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 233 state to active 
[ns_server:info,2014-08-19T16:48:18.786,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 232 state to active [ns_server:info,2014-08-19T16:48:18.786,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 231 state to active [ns_server:info,2014-08-19T16:48:18.786,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 230 state to active [ns_server:info,2014-08-19T16:48:18.786,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 229 state to active [ns_server:info,2014-08-19T16:48:18.787,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 228 state to active [ns_server:info,2014-08-19T16:48:18.787,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 227 state to active [ns_server:info,2014-08-19T16:48:18.787,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 226 state to active [ns_server:info,2014-08-19T16:48:18.787,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 225 state to active [ns_server:info,2014-08-19T16:48:18.788,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 224 state to active [ns_server:info,2014-08-19T16:48:18.788,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 223 state to active [ns_server:info,2014-08-19T16:48:18.788,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 222 state to active [ns_server:info,2014-08-19T16:48:18.788,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 221 state to active [ns_server:info,2014-08-19T16:48:18.788,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 220 state to active [ns_server:info,2014-08-19T16:48:18.789,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 219 state to active [ns_server:info,2014-08-19T16:48:18.789,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 218 state to active [ns_server:info,2014-08-19T16:48:18.789,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 217 state to active [ns_server:info,2014-08-19T16:48:18.789,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 216 state to active [ns_server:info,2014-08-19T16:48:18.789,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 215 state to active [ns_server:info,2014-08-19T16:48:18.790,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 214 state to active [ns_server:info,2014-08-19T16:48:18.790,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 213 state to active [ns_server:info,2014-08-19T16:48:18.790,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 212 state to active [ns_server:info,2014-08-19T16:48:18.790,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 211 state to active [ns_server:info,2014-08-19T16:48:18.790,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 210 state to active [ns_server:info,2014-08-19T16:48:18.791,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 209 state to active [ns_server:info,2014-08-19T16:48:18.791,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 208 state to active [ns_server:info,2014-08-19T16:48:18.791,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 207 state to active 
[ns_server:info,2014-08-19T16:48:18.792,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 206 state to active [ns_server:info,2014-08-19T16:48:18.792,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 205 state to active [ns_server:info,2014-08-19T16:48:18.792,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 204 state to active [ns_server:info,2014-08-19T16:48:18.792,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 203 state to active [ns_server:info,2014-08-19T16:48:18.793,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 202 state to active [ns_server:info,2014-08-19T16:48:18.793,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 201 state to active [ns_server:info,2014-08-19T16:48:18.793,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 200 state to active [ns_server:info,2014-08-19T16:48:18.793,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 199 state to active [ns_server:info,2014-08-19T16:48:18.794,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 198 state to active [ns_server:info,2014-08-19T16:48:18.794,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 197 state to active [ns_server:info,2014-08-19T16:48:18.794,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 196 state to active [ns_server:info,2014-08-19T16:48:18.794,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 195 state to active [ns_server:info,2014-08-19T16:48:18.795,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 194 state to active [ns_server:info,2014-08-19T16:48:18.795,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 193 state to active [ns_server:info,2014-08-19T16:48:18.795,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 192 state to active [ns_server:info,2014-08-19T16:48:18.795,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 191 state to active [ns_server:info,2014-08-19T16:48:18.795,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 190 state to active [ns_server:info,2014-08-19T16:48:18.796,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 189 state to active [ns_server:info,2014-08-19T16:48:18.796,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 188 state to active [ns_server:info,2014-08-19T16:48:18.796,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 187 state to active [ns_server:info,2014-08-19T16:48:18.796,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 186 state to active [ns_server:info,2014-08-19T16:48:18.797,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 185 state to active [ns_server:info,2014-08-19T16:48:18.797,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 184 state to active [ns_server:info,2014-08-19T16:48:18.797,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 183 state to active [ns_server:info,2014-08-19T16:48:18.797,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 182 state to active [ns_server:info,2014-08-19T16:48:18.797,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 181 state to active 
[ns_server:info,2014-08-19T16:48:18.798,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 180 state to active [ns_server:info,2014-08-19T16:48:18.798,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 179 state to active [ns_server:info,2014-08-19T16:48:18.798,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 178 state to active [ns_server:info,2014-08-19T16:48:18.798,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 177 state to active [ns_server:info,2014-08-19T16:48:18.799,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 176 state to active [ns_server:info,2014-08-19T16:48:18.799,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 175 state to active [ns_server:info,2014-08-19T16:48:18.799,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 174 state to active [ns_server:info,2014-08-19T16:48:18.799,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 173 state to active [ns_server:info,2014-08-19T16:48:18.799,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 172 state to active [ns_server:info,2014-08-19T16:48:18.800,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 171 state to active [ns_server:info,2014-08-19T16:48:18.800,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 170 state to active [ns_server:info,2014-08-19T16:48:18.800,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 169 state to active [ns_server:info,2014-08-19T16:48:18.800,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 168 state to active [ns_server:info,2014-08-19T16:48:18.801,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 167 state to active [ns_server:info,2014-08-19T16:48:18.801,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 166 state to active [ns_server:info,2014-08-19T16:48:18.801,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 165 state to active [ns_server:info,2014-08-19T16:48:18.801,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 164 state to active [ns_server:info,2014-08-19T16:48:18.802,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 163 state to active [ns_server:info,2014-08-19T16:48:18.802,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 162 state to active [ns_server:info,2014-08-19T16:48:18.802,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 161 state to active [ns_server:info,2014-08-19T16:48:18.802,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 160 state to active [ns_server:info,2014-08-19T16:48:18.802,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 159 state to active [ns_server:info,2014-08-19T16:48:18.803,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 158 state to active [ns_server:info,2014-08-19T16:48:18.803,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 157 state to active [ns_server:info,2014-08-19T16:48:18.803,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 156 state to active [ns_server:info,2014-08-19T16:48:18.803,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 155 state to active 
[ns_server:info,2014-08-19T16:48:18.804,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 154 state to active [ns_server:info,2014-08-19T16:48:18.804,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 153 state to active [ns_server:info,2014-08-19T16:48:18.804,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 152 state to active [ns_server:info,2014-08-19T16:48:18.804,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 151 state to active [ns_server:info,2014-08-19T16:48:18.804,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 150 state to active [ns_server:info,2014-08-19T16:48:18.805,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 149 state to active [ns_server:info,2014-08-19T16:48:18.805,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 148 state to active [ns_server:info,2014-08-19T16:48:18.805,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 147 state to active [ns_server:info,2014-08-19T16:48:18.805,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 146 state to active [ns_server:info,2014-08-19T16:48:18.805,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 145 state to active [ns_server:info,2014-08-19T16:48:18.806,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 144 state to active [ns_server:info,2014-08-19T16:48:18.806,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 143 state to active [ns_server:info,2014-08-19T16:48:18.806,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 142 state to active [ns_server:info,2014-08-19T16:48:18.806,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 141 state to active [ns_server:info,2014-08-19T16:48:18.806,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 140 state to active [ns_server:info,2014-08-19T16:48:18.807,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 139 state to active [ns_server:info,2014-08-19T16:48:18.807,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 138 state to active [ns_server:info,2014-08-19T16:48:18.807,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 137 state to active [ns_server:info,2014-08-19T16:48:18.807,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 136 state to active [ns_server:info,2014-08-19T16:48:18.808,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 135 state to active [ns_server:info,2014-08-19T16:48:18.808,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 134 state to active [ns_server:info,2014-08-19T16:48:18.808,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 133 state to active [ns_server:info,2014-08-19T16:48:18.808,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 132 state to active [ns_server:info,2014-08-19T16:48:18.808,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 131 state to active [ns_server:info,2014-08-19T16:48:18.809,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 130 state to active [ns_server:info,2014-08-19T16:48:18.809,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 129 state to active 
[ns_server:info,2014-08-19T16:48:18.809,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 128 state to active [ns_server:info,2014-08-19T16:48:18.809,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 127 state to active [ns_server:info,2014-08-19T16:48:18.809,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 126 state to active [ns_server:info,2014-08-19T16:48:18.810,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 125 state to active [ns_server:info,2014-08-19T16:48:18.810,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 124 state to active [ns_server:info,2014-08-19T16:48:18.810,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 123 state to active [ns_server:info,2014-08-19T16:48:18.810,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 122 state to active [ns_server:info,2014-08-19T16:48:18.810,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 121 state to active [ns_server:info,2014-08-19T16:48:18.811,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 120 state to active [ns_server:info,2014-08-19T16:48:18.811,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 119 state to active [ns_server:info,2014-08-19T16:48:18.811,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 118 state to active [ns_server:info,2014-08-19T16:48:18.811,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 117 state to active [ns_server:info,2014-08-19T16:48:18.812,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 116 state to active [ns_server:info,2014-08-19T16:48:18.812,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 115 state to active [ns_server:info,2014-08-19T16:48:18.812,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 114 state to active [ns_server:info,2014-08-19T16:48:18.812,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 113 state to active [ns_server:info,2014-08-19T16:48:18.812,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 112 state to active [ns_server:info,2014-08-19T16:48:18.813,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 111 state to active [ns_server:info,2014-08-19T16:48:18.813,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 110 state to active [ns_server:info,2014-08-19T16:48:18.813,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 109 state to active [views:debug,2014-08-19T16:48:18.813,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1023. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:18.813,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1023,active,0} [ns_server:info,2014-08-19T16:48:18.814,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 108 state to active [ns_server:info,2014-08-19T16:48:18.814,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 107 state to active [ns_server:info,2014-08-19T16:48:18.814,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 106 state to active [ns_server:info,2014-08-19T16:48:18.815,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 105 state to active [ns_server:info,2014-08-19T16:48:18.815,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 104 state to active [ns_server:info,2014-08-19T16:48:18.816,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 103 state to active [ns_server:info,2014-08-19T16:48:18.816,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 102 state to active [ns_server:info,2014-08-19T16:48:18.816,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 101 state to active [ns_server:info,2014-08-19T16:48:18.817,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 100 state to active [ns_server:info,2014-08-19T16:48:18.817,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 99 state to active [ns_server:info,2014-08-19T16:48:18.817,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 98 state to active [ns_server:info,2014-08-19T16:48:18.817,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 97 state to active [ns_server:info,2014-08-19T16:48:18.818,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 96 state to active [ns_server:info,2014-08-19T16:48:18.818,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 95 state to active [ns_server:info,2014-08-19T16:48:18.818,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 94 state to active [ns_server:info,2014-08-19T16:48:18.818,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 93 state to active [ns_server:info,2014-08-19T16:48:18.818,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 92 state to active [ns_server:info,2014-08-19T16:48:18.819,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 91 state to active [ns_server:info,2014-08-19T16:48:18.819,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 90 state to active [ns_server:info,2014-08-19T16:48:18.819,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 89 state to active [ns_server:info,2014-08-19T16:48:18.819,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 88 state to active [ns_server:info,2014-08-19T16:48:18.819,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 87 state to active [ns_server:info,2014-08-19T16:48:18.820,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 86 state to active [ns_server:info,2014-08-19T16:48:18.820,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 85 state to active [ns_server:info,2014-08-19T16:48:18.820,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 84 state to 
active [ns_server:info,2014-08-19T16:48:18.820,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 83 state to active [ns_server:info,2014-08-19T16:48:18.821,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 82 state to active [ns_server:info,2014-08-19T16:48:18.821,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 81 state to active [ns_server:info,2014-08-19T16:48:18.821,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 80 state to active [ns_server:info,2014-08-19T16:48:18.821,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 79 state to active [ns_server:info,2014-08-19T16:48:18.821,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 78 state to active [ns_server:info,2014-08-19T16:48:18.822,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 77 state to active [ns_server:info,2014-08-19T16:48:18.822,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 76 state to active [ns_server:info,2014-08-19T16:48:18.822,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 75 state to active [ns_server:info,2014-08-19T16:48:18.822,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 74 state to active [ns_server:info,2014-08-19T16:48:18.822,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 73 state to active [ns_server:info,2014-08-19T16:48:18.823,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 72 state to active [ns_server:info,2014-08-19T16:48:18.823,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 71 state to active [ns_server:info,2014-08-19T16:48:18.823,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 70 state to active [ns_server:info,2014-08-19T16:48:18.823,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 69 state to active [ns_server:info,2014-08-19T16:48:18.823,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 68 state to active [ns_server:info,2014-08-19T16:48:18.824,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 67 state to active [ns_server:info,2014-08-19T16:48:18.824,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 66 state to active [ns_server:info,2014-08-19T16:48:18.824,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 65 state to active [ns_server:info,2014-08-19T16:48:18.824,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 64 state to active [ns_server:info,2014-08-19T16:48:18.825,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 63 state to active [ns_server:info,2014-08-19T16:48:18.825,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 62 state to active [ns_server:info,2014-08-19T16:48:18.825,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 61 state to active [ns_server:info,2014-08-19T16:48:18.825,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 60 state to active [ns_server:info,2014-08-19T16:48:18.825,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 59 state to active [ns_server:info,2014-08-19T16:48:18.826,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 58 state to active 
[ns_server:info,2014-08-19T16:48:18.826,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 57 state to active [ns_server:info,2014-08-19T16:48:18.826,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 56 state to active [ns_server:info,2014-08-19T16:48:18.826,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 55 state to active [ns_server:info,2014-08-19T16:48:18.826,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 54 state to active [ns_server:info,2014-08-19T16:48:18.827,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 53 state to active [ns_server:info,2014-08-19T16:48:18.827,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 52 state to active [ns_server:info,2014-08-19T16:48:18.827,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 51 state to active [ns_server:info,2014-08-19T16:48:18.827,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 50 state to active [ns_server:info,2014-08-19T16:48:18.827,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 49 state to active [ns_server:info,2014-08-19T16:48:18.828,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 48 state to active [ns_server:info,2014-08-19T16:48:18.828,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 47 state to active [ns_server:info,2014-08-19T16:48:18.828,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 46 state to active [ns_server:info,2014-08-19T16:48:18.828,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 45 state to active [ns_server:info,2014-08-19T16:48:18.829,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 44 state to active [ns_server:info,2014-08-19T16:48:18.829,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 43 state to active [ns_server:info,2014-08-19T16:48:18.829,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 42 state to active [ns_server:info,2014-08-19T16:48:18.829,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 41 state to active [ns_server:info,2014-08-19T16:48:18.830,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 40 state to active [ns_server:info,2014-08-19T16:48:18.830,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 39 state to active [ns_server:info,2014-08-19T16:48:18.830,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 38 state to active [ns_server:info,2014-08-19T16:48:18.830,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 37 state to active [ns_server:info,2014-08-19T16:48:18.830,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 36 state to active [ns_server:info,2014-08-19T16:48:18.831,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 35 state to active [ns_server:info,2014-08-19T16:48:18.831,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 34 state to active [ns_server:info,2014-08-19T16:48:18.831,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 33 state to active [ns_server:info,2014-08-19T16:48:18.831,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 32 state to active 
[ns_server:info,2014-08-19T16:48:18.831,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 31 state to active [ns_server:info,2014-08-19T16:48:18.832,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 30 state to active [ns_server:info,2014-08-19T16:48:18.832,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 29 state to active [ns_server:info,2014-08-19T16:48:18.832,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 28 state to active [ns_server:info,2014-08-19T16:48:18.832,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 27 state to active [ns_server:info,2014-08-19T16:48:18.832,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 26 state to active [ns_server:info,2014-08-19T16:48:18.833,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 25 state to active [ns_server:info,2014-08-19T16:48:18.833,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 24 state to active [ns_server:info,2014-08-19T16:48:18.833,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 23 state to active [ns_server:info,2014-08-19T16:48:18.833,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 22 state to active [ns_server:info,2014-08-19T16:48:18.833,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 21 state to active [ns_server:info,2014-08-19T16:48:18.834,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 20 state to active [ns_server:info,2014-08-19T16:48:18.834,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 19 state to active [ns_server:info,2014-08-19T16:48:18.834,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 18 state to active [ns_server:info,2014-08-19T16:48:18.834,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 17 state to active [ns_server:info,2014-08-19T16:48:18.835,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 16 state to active [ns_server:info,2014-08-19T16:48:18.835,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 15 state to active [ns_server:info,2014-08-19T16:48:18.835,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 14 state to active [ns_server:info,2014-08-19T16:48:18.835,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 13 state to active [ns_server:info,2014-08-19T16:48:18.835,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 12 state to active [ns_server:info,2014-08-19T16:48:18.836,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 11 state to active [ns_server:info,2014-08-19T16:48:18.836,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 10 state to active [ns_server:info,2014-08-19T16:48:18.836,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 9 state to active [ns_server:info,2014-08-19T16:48:18.836,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 8 state to active [ns_server:info,2014-08-19T16:48:18.836,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 7 state to active [ns_server:info,2014-08-19T16:48:18.837,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 6 state to active 
[ns_server:info,2014-08-19T16:48:18.837,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 5 state to active [ns_server:info,2014-08-19T16:48:18.837,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 4 state to active [ns_server:info,2014-08-19T16:48:18.837,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 3 state to active [ns_server:info,2014-08-19T16:48:18.838,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 2 state to active [ns_server:info,2014-08-19T16:48:18.838,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1 state to active [ns_server:info,2014-08-19T16:48:18.838,ns_1@127.0.0.1:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 0 state to active [ns_server:info,2014-08-19T16:48:18.839,ns_1@127.0.0.1:ns_memcached-default<0.19197.0>:ns_memcached:handle_call:247]Enabling traffic to bucket "default" [ns_server:info,2014-08-19T16:48:18.839,ns_1@127.0.0.1:ns_memcached-default<0.19197.0>:ns_memcached:handle_call:251]Bucket "default" marked as warmed in 1 seconds [ns_server:debug,2014-08-19T16:48:18.955,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1022. Nacking mccouch update. [views:debug,2014-08-19T16:48:18.955,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1022. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:18.956,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1022,active,0} [ns_server:debug,2014-08-19T16:48:18.956,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1023] [views:debug,2014-08-19T16:48:18.990,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1022. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:18.990,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1022,active,0} [ns_server:debug,2014-08-19T16:48:19.057,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1020. Nacking mccouch update. [views:debug,2014-08-19T16:48:19.057,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1020. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.058,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1020,1023] [ns_server:debug,2014-08-19T16:48:19.058,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1020,active,0} [views:debug,2014-08-19T16:48:19.091,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1020. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.091,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1020,active,0} [ns_server:debug,2014-08-19T16:48:19.158,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1018. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:19.158,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1018. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.158,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1018,active,0} [ns_server:debug,2014-08-19T16:48:19.158,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1022,1018,1020,1023] [views:debug,2014-08-19T16:48:19.192,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1018. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.192,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1018,active,0} [ns_server:debug,2014-08-19T16:48:19.284,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1016. Nacking mccouch update. [views:debug,2014-08-19T16:48:19.284,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1016. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.284,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1016,active,0} [ns_server:debug,2014-08-19T16:48:19.284,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1020,1023] [views:debug,2014-08-19T16:48:19.343,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1016. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.343,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1016,active,0} [ns_server:debug,2014-08-19T16:48:19.477,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1014. Nacking mccouch update. [views:debug,2014-08-19T16:48:19.477,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1014. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.477,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1014,active,0} [ns_server:debug,2014-08-19T16:48:19.477,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1018,1014,1020,1023] [views:debug,2014-08-19T16:48:19.544,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1014. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.544,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1014,active,0} [ns_server:debug,2014-08-19T16:48:19.694,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1012. Nacking mccouch update. [views:debug,2014-08-19T16:48:19.694,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1012. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.694,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1012,active,0} [ns_server:debug,2014-08-19T16:48:19.694,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023] [views:debug,2014-08-19T16:48:19.761,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1012. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.761,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1012,active,0} [ns_server:debug,2014-08-19T16:48:19.911,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1010. Nacking mccouch update. [views:debug,2014-08-19T16:48:19.912,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1010. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.912,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1010,active,0} [ns_server:debug,2014-08-19T16:48:19.912,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1014,1020,1023,1010] [views:debug,2014-08-19T16:48:19.987,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1010. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:19.987,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1010,active,0} [ns_server:debug,2014-08-19T16:48:20.054,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1008. Nacking mccouch update. [views:debug,2014-08-19T16:48:20.054,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1008. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.054,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1008,active,0} [ns_server:debug,2014-08-19T16:48:20.054,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1012,1018,1008,1014,1020,1023,1010] [views:debug,2014-08-19T16:48:20.088,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1008. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.088,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1008,active,0} [ns_server:debug,2014-08-19T16:48:20.168,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1006. Nacking mccouch update. [views:debug,2014-08-19T16:48:20.168,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1006. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.168,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1006,active,0} [ns_server:debug,2014-08-19T16:48:20.168,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1023,1010] [views:debug,2014-08-19T16:48:20.252,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1006. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.252,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1006,active,0} [ns_server:debug,2014-08-19T16:48:20.427,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1004. Nacking mccouch update. [views:debug,2014-08-19T16:48:20.427,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1004. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.427,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1004,active,0} [ns_server:debug,2014-08-19T16:48:20.427,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1008,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:48:20.511,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1004. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.511,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1004,active,0} [ns_server:debug,2014-08-19T16:48:20.678,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1002. Nacking mccouch update. [views:debug,2014-08-19T16:48:20.678,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1002. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.678,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1002,active,0} [ns_server:debug,2014-08-19T16:48:20.678,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:48:20.762,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1002. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.762,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1002,active,0} [ns_server:debug,2014-08-19T16:48:20.928,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1000. Nacking mccouch update. [views:debug,2014-08-19T16:48:20.928,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1000. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:20.928,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1000,active,0} [ns_server:debug,2014-08-19T16:48:20.928,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:48:21.004,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1000. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.004,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1000,active,0} [ns_server:debug,2014-08-19T16:48:21.179,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 998. Nacking mccouch update. [views:debug,2014-08-19T16:48:21.179,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/998. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.179,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",998,active,0} [ns_server:debug,2014-08-19T16:48:21.179,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:48:21.247,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/998. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.247,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",998,active,0} [ns_server:debug,2014-08-19T16:48:21.323,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 996. Nacking mccouch update. [views:debug,2014-08-19T16:48:21.323,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/996. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.323,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",996,active,0} [ns_server:debug,2014-08-19T16:48:21.323,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,1010] [views:debug,2014-08-19T16:48:21.357,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/996. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.357,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",996,active,0} [ns_server:info,2014-08-19T16:48:21.373,ns_1@127.0.0.1:ns_doctor<0.17945.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@127.0.0.1': ["default"] [ns_server:debug,2014-08-19T16:48:21.424,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 994. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:21.424,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/994. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.424,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",994,active,0} [ns_server:debug,2014-08-19T16:48:21.424,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,1008,998,1014,1020,1004,1023,994,1010] [views:debug,2014-08-19T16:48:21.457,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/994. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.458,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",994,active,0} [ns_server:debug,2014-08-19T16:48:21.524,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 992. Nacking mccouch update. [views:debug,2014-08-19T16:48:21.525,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/992. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.525,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",992,active,0} [ns_server:debug,2014-08-19T16:48:21.525,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023,994, 1010] [views:debug,2014-08-19T16:48:21.559,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/992. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.559,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",992,active,0} [ns_server:debug,2014-08-19T16:48:21.625,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 990. Nacking mccouch update. [views:debug,2014-08-19T16:48:21.625,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/990. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.625,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",990,active,0} [ns_server:debug,2014-08-19T16:48:21.626,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,1020,1004,1023, 994,1010] [views:debug,2014-08-19T16:48:21.660,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/990. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.660,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",990,active,0} [ns_server:debug,2014-08-19T16:48:21.771,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 988. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:21.771,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/988. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.771,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",988,active,0} [ns_server:debug,2014-08-19T16:48:21.772,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,1018,1002,992,1008,998,1014,988,1020,1004, 1023,994,1010] [views:debug,2014-08-19T16:48:21.856,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/988. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.856,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",988,active,0} [ns_server:debug,2014-08-19T16:48:21.997,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 986. Nacking mccouch update. [views:debug,2014-08-19T16:48:21.997,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/986. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:21.997,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",986,active,0} [ns_server:debug,2014-08-19T16:48:21.997,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988,1020, 1004,1023,994,1010] [views:debug,2014-08-19T16:48:22.065,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/986. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.065,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",986,active,0} [ns_server:debug,2014-08-19T16:48:22.198,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 984. Nacking mccouch update. [views:debug,2014-08-19T16:48:22.198,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/984. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.198,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",984,active,0} [ns_server:debug,2014-08-19T16:48:22.198,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,1014,988, 1020,1004,1023,994,1010] [views:debug,2014-08-19T16:48:22.266,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/984. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.266,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",984,active,0} [ns_server:debug,2014-08-19T16:48:22.399,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 982. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:22.399,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/982. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.399,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",982,active,0} [ns_server:debug,2014-08-19T16:48:22.399,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,1012,986,1018,1002,992,1008,998,982,1014,988, 1020,1004,1023,994,1010] [views:debug,2014-08-19T16:48:22.484,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/982. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.484,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",982,active,0} [ns_server:debug,2014-08-19T16:48:22.634,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 980. Nacking mccouch update. [views:debug,2014-08-19T16:48:22.634,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/980. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.634,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",980,active,0} [ns_server:debug,2014-08-19T16:48:22.634,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,1010] [views:debug,2014-08-19T16:48:22.718,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/980. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.718,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",980,active,0} [ns_server:debug,2014-08-19T16:48:22.818,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 978. Nacking mccouch update. [views:debug,2014-08-19T16:48:22.818,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/978. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.819,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,1008,998,982,1014, 988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:48:22.819,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",978,active,0} [views:debug,2014-08-19T16:48:22.852,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/978. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.852,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",978,active,0} [ns_server:debug,2014-08-19T16:48:22.944,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 976. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:22.944,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/976. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.944,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998,982, 1014,988,1020,1004,1023,994,978,1010] [ns_server:debug,2014-08-19T16:48:22.944,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",976,active,0} [views:debug,2014-08-19T16:48:22.978,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/976. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:22.978,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",976,active,0} [ns_server:debug,2014-08-19T16:48:23.128,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 974. Nacking mccouch update. [views:debug,2014-08-19T16:48:23.128,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/974. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.128,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",974,active,0} [ns_server:debug,2014-08-19T16:48:23.128,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:48:23.179,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/974. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.179,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",974,active,0} [ns_server:debug,2014-08-19T16:48:23.337,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 972. Nacking mccouch update. [views:debug,2014-08-19T16:48:23.337,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/972. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.337,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",972,active,0} [ns_server:debug,2014-08-19T16:48:23.338,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,1018,1002,992,976,1008,998, 982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:48:23.413,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/972. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.413,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",972,active,0} [ns_server:debug,2014-08-19T16:48:23.568,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 970. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:23.568,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/970. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.568,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",970,active,0} [ns_server:debug,2014-08-19T16:48:23.568,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976,1008, 998,982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:48:23.627,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/970. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.628,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",970,active,0} [ns_server:debug,2014-08-19T16:48:23.694,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 968. Nacking mccouch update. [views:debug,2014-08-19T16:48:23.694,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/968. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.694,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",968,active,0} [ns_server:debug,2014-08-19T16:48:23.694,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:48:23.727,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/968. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.727,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",968,active,0} [ns_server:debug,2014-08-19T16:48:23.794,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 966. Nacking mccouch update. [views:debug,2014-08-19T16:48:23.794,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/966. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.795,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",966,active,0} [ns_server:debug,2014-08-19T16:48:23.795,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,1012,986,970,1018,1002,992,976, 1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:48:23.829,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/966. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.829,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",966,active,0} [ns_server:debug,2014-08-19T16:48:23.920,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 964. 
Nacking mccouch update. [views:debug,2014-08-19T16:48:23.920,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/964. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.921,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",964,active,0} [ns_server:debug,2014-08-19T16:48:23.921,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,1010] [views:debug,2014-08-19T16:48:23.980,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/964. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:23.980,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",964,active,0} [ns_server:debug,2014-08-19T16:48:24.046,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 962. Nacking mccouch update. [views:debug,2014-08-19T16:48:24.046,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/962. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.046,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",962,active,0} [ns_server:debug,2014-08-19T16:48:24.046,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:48:24.089,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/962. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.089,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",962,active,0} [ns_server:debug,2014-08-19T16:48:24.247,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 960. Nacking mccouch update. [views:debug,2014-08-19T16:48:24.247,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/960. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.247,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",960,active,0} [ns_server:debug,2014-08-19T16:48:24.247,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,1022,1006,996,980,964,1012,986,970,1018,1002,992, 976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:48:24.308,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/960. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.308,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",960,active,0} [ns_server:debug,2014-08-19T16:48:24.474,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 958. Nacking mccouch update. [views:debug,2014-08-19T16:48:24.474,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/958. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.474,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",958,active,0} [ns_server:debug,2014-08-19T16:48:24.474,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,1020,1004,1023,994,978,962,1010] [views:debug,2014-08-19T16:48:24.533,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/958. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.533,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",958,active,0} [ns_server:debug,2014-08-19T16:48:24.700,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 956. Nacking mccouch update. [views:debug,2014-08-19T16:48:24.700,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/956. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.700,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",956,active,0} [ns_server:debug,2014-08-19T16:48:24.700,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,1018,1002, 992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978,962, 1010] [views:debug,2014-08-19T16:48:24.759,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/956. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.759,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",956,active,0} [ns_server:debug,2014-08-19T16:48:24.925,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 954. Nacking mccouch update. [views:debug,2014-08-19T16:48:24.926,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/954. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.926,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954,1018, 1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994,978, 962,1010] [ns_server:debug,2014-08-19T16:48:24.926,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",954,active,0} [views:debug,2014-08-19T16:48:24.984,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/954. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:24.984,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",954,active,0} [ns_server:debug,2014-08-19T16:48:25.080,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 952. Nacking mccouch update. [views:debug,2014-08-19T16:48:25.080,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/952. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.080,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,1014,988,972,956,1020,1004,1023,994, 978,962,1010] [ns_server:debug,2014-08-19T16:48:25.080,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",952,active,0} [views:debug,2014-08-19T16:48:25.114,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/952. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.114,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",952,active,0} [ns_server:debug,2014-08-19T16:48:25.181,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 950. Nacking mccouch update. [views:debug,2014-08-19T16:48:25.181,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/950. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.181,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",950,active,0} [ns_server:debug,2014-08-19T16:48:25.181,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [views:debug,2014-08-19T16:48:25.215,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/950. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.215,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",950,active,0} [ns_server:debug,2014-08-19T16:48:25.298,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 948. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:25.298,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/948. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.299,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",948,active,0} [ns_server:debug,2014-08-19T16:48:25.299,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,1010] [views:debug,2014-08-19T16:48:25.349,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/948. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.350,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",948,active,0} [ns_server:debug,2014-08-19T16:48:25.416,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 946. Nacking mccouch update. [views:debug,2014-08-19T16:48:25.416,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/946. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.416,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",946,active,0} [ns_server:debug,2014-08-19T16:48:25.416,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,1008,998,982,966,950,1014,988,972,956,1020,1004,1023, 994,978,962,946,1010] [views:debug,2014-08-19T16:48:25.450,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/946. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.450,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",946,active,0} [ns_server:debug,2014-08-19T16:48:25.551,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 944. Nacking mccouch update. [views:debug,2014-08-19T16:48:25.551,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/944. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.551,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",944,active,0} [ns_server:debug,2014-08-19T16:48:25.551,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,1022,1006,996,980,964,948,1012,986,970,954, 1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020,1004, 1023,994,978,962,946,1010] [views:debug,2014-08-19T16:48:25.619,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/944. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.619,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",944,active,0} [ns_server:debug,2014-08-19T16:48:25.793,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 942. Nacking mccouch update. [views:debug,2014-08-19T16:48:25.794,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/942. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.794,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",942,active,0} [ns_server:debug,2014-08-19T16:48:25.794,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:48:25.852,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/942. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:25.852,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",942,active,0} [ns_server:debug,2014-08-19T16:48:26.011,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 940. Nacking mccouch update. [views:debug,2014-08-19T16:48:26.011,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/940. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.011,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",940,active,0} [ns_server:debug,2014-08-19T16:48:26.011,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940,1020, 1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:48:26.062,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/940. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.062,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",940,active,0} [ns_server:debug,2014-08-19T16:48:26.220,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 938. Nacking mccouch update. [views:debug,2014-08-19T16:48:26.220,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/938. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.220,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",938,active,0} [ns_server:debug,2014-08-19T16:48:26.220,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986,970, 954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956,940, 1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:48:26.271,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/938. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.271,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",938,active,0} [ns_server:debug,2014-08-19T16:48:26.412,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 936. Nacking mccouch update. [views:debug,2014-08-19T16:48:26.412,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/936. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.413,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",936,active,0} [ns_server:debug,2014-08-19T16:48:26.412,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,998,982,966,950,1014,988,972,956, 940,1020,1004,1023,994,978,962,946,1010] [ns_server:debug,2014-08-19T16:48:26.437,ns_1@127.0.0.1:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:48:26.440,ns_1@127.0.0.1:<0.19911.0>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:48:26.440,ns_1@127.0.0.1:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:48:26.440,ns_1@127.0.0.1:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [views:debug,2014-08-19T16:48:26.480,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/936. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.480,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",936,active,0} [ns_server:debug,2014-08-19T16:48:26.593,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 934. Nacking mccouch update. [views:debug,2014-08-19T16:48:26.593,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/934. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.594,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",934,active,0} [ns_server:debug,2014-08-19T16:48:26.593,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,1022,1006,996,980,964,948,1012,986, 970,954,938,1018,1002,992,976,960,944,1008,998,982,966,950,934,1014,988,972, 956,940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:48:26.652,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/934. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.652,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",934,active,0} [ns_server:debug,2014-08-19T16:48:26.744,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 932. Nacking mccouch update. [views:debug,2014-08-19T16:48:26.744,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/932. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.745,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",932,active,0} [ns_server:debug,2014-08-19T16:48:26.744,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,1022,1006,996,980,964,948,932,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,998,982,966,950,934,1014,988, 972,956,940,1020,1004,1023,994,978,962,946,1010] [views:debug,2014-08-19T16:48:26.803,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/932. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.803,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",932,active,0} [ns_server:debug,2014-08-19T16:48:26.895,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 930. Nacking mccouch update. [views:debug,2014-08-19T16:48:26.895,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/930. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.895,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",930,active,0} [ns_server:debug,2014-08-19T16:48:26.895,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,1022,1006,996,980,964,948,932,1012, 986,970,954,938,1018,1002,992,976,960,944,1008,998,982,966,950,934,1014,988, 972,956,940,1020,1004,1023,994,978,962,946,930,1010] [views:debug,2014-08-19T16:48:26.954,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/930. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:26.954,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",930,active,0} [ns_server:debug,2014-08-19T16:48:27.046,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 928. Nacking mccouch update. [views:debug,2014-08-19T16:48:27.046,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/928. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.046,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",928,active,0} [ns_server:debug,2014-08-19T16:48:27.046,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,1022,1006,996,980,964,948,932,1012, 986,970,954,938,1018,1002,992,976,960,944,928,1008,998,982,966,950,934,1014, 988,972,956,940,1020,1004,1023,994,978,962,946,930,1010] [views:debug,2014-08-19T16:48:27.105,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/928. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.105,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",928,active,0} [ns_server:debug,2014-08-19T16:48:27.196,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 926. Nacking mccouch update. [views:debug,2014-08-19T16:48:27.196,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/926. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.196,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",926,active,0} [ns_server:debug,2014-08-19T16:48:27.197,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948,932, 1012,986,970,954,938,1018,1002,992,976,960,944,928,1008,998,982,966,950,934, 1014,988,972,956,940,1020,1004,1023,994,978,962,946,930,1010] [views:debug,2014-08-19T16:48:27.263,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/926. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.264,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",926,active,0} [ns_server:debug,2014-08-19T16:48:27.405,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 924. Nacking mccouch update. [views:debug,2014-08-19T16:48:27.405,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/924. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.405,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",924,active,0} [ns_server:debug,2014-08-19T16:48:27.405,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948,932, 1012,986,970,954,938,1018,1002,992,976,960,944,928,1008,998,982,966,950,934, 1014,988,972,956,940,924,1020,1004,1023,994,978,962,946,930,1010] [views:debug,2014-08-19T16:48:27.473,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/924. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.473,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",924,active,0} [ns_server:debug,2014-08-19T16:48:27.631,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 922. Nacking mccouch update. [views:debug,2014-08-19T16:48:27.631,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/922. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.631,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",922,active,0} [ns_server:debug,2014-08-19T16:48:27.632,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948,932, 1012,986,970,954,938,922,1018,1002,992,976,960,944,928,1008,998,982,966,950, 934,1014,988,972,956,940,924,1020,1004,1023,994,978,962,946,930,1010] [views:debug,2014-08-19T16:48:27.682,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/922. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.682,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",922,active,0} [ns_server:debug,2014-08-19T16:48:27.850,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 920. Nacking mccouch update. [views:debug,2014-08-19T16:48:27.850,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/920. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.850,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",920,active,0} [ns_server:debug,2014-08-19T16:48:27.850,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948, 932,1012,986,970,954,938,922,1018,1002,992,976,960,944,928,1008,998,982,966, 950,934,1014,988,972,956,940,924,1020,1004,1023,994,978,962,946,930,1010] [views:debug,2014-08-19T16:48:27.934,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/920. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:27.934,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",920,active,0} [ns_server:debug,2014-08-19T16:48:28.101,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 918. Nacking mccouch update. [views:debug,2014-08-19T16:48:28.101,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/918. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.101,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",918,active,0} [ns_server:debug,2014-08-19T16:48:28.101,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948, 932,1012,986,970,954,938,922,1018,1002,992,976,960,944,928,1008,998,982,966, 950,934,918,1014,988,972,956,940,924,1020,1004,1023,994,978,962,946,930,1010] [views:debug,2014-08-19T16:48:28.155,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/918. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.155,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",918,active,0} [ns_server:debug,2014-08-19T16:48:28.239,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 916. Nacking mccouch update. [views:debug,2014-08-19T16:48:28.239,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/916. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.239,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",916,active,0} [ns_server:debug,2014-08-19T16:48:28.239,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948, 932,916,1012,986,970,954,938,922,1018,1002,992,976,960,944,928,1008,998,982, 966,950,934,918,1014,988,972,956,940,924,1020,1004,1023,994,978,962,946,930, 1010] [views:debug,2014-08-19T16:48:28.289,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/916. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.289,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",916,active,0} [ns_server:debug,2014-08-19T16:48:28.373,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 914. Nacking mccouch update. [views:debug,2014-08-19T16:48:28.373,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/914. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.373,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",914,active,0} [ns_server:debug,2014-08-19T16:48:28.373,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948, 932,916,1012,986,970,954,938,922,1018,1002,992,976,960,944,928,1008,998,982, 966,950,934,918,1014,988,972,956,940,924,1020,1004,1023,994,978,962,946,930, 914,1010] [views:debug,2014-08-19T16:48:28.423,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/914. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.423,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",914,active,0} [ns_server:debug,2014-08-19T16:48:28.507,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 912. Nacking mccouch update. [views:debug,2014-08-19T16:48:28.507,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/912. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.507,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",912,active,0} [ns_server:debug,2014-08-19T16:48:28.507,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,1022,1006,996,980,964,948, 932,916,1012,986,970,954,938,922,1018,1002,992,976,960,944,928,912,1008,998, 982,966,950,934,918,1014,988,972,956,940,924,1020,1004,1023,994,978,962,946, 930,914,1010] [views:debug,2014-08-19T16:48:28.557,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/912. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.558,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",912,active,0} [ns_server:debug,2014-08-19T16:48:28.624,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 910. Nacking mccouch update. [views:debug,2014-08-19T16:48:28.624,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/910. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.625,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",910,active,0} [ns_server:debug,2014-08-19T16:48:28.625,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,910,1022,1006,996,980,964, 948,932,916,1012,986,970,954,938,922,1018,1002,992,976,960,944,928,912,1008, 998,982,966,950,934,918,1014,988,972,956,940,924,1020,1004,1023,994,978,962, 946,930,914,1010] [views:debug,2014-08-19T16:48:28.658,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/910. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.658,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",910,active,0} [ns_server:debug,2014-08-19T16:48:28.793,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 908. Nacking mccouch update. [views:debug,2014-08-19T16:48:28.793,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/908. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.793,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",908,active,0} [ns_server:debug,2014-08-19T16:48:28.793,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,910,1022,1006,996,980,964, 948,932,916,1012,986,970,954,938,922,1018,1002,992,976,960,944,928,912,1008, 998,982,966,950,934,918,1014,988,972,956,940,924,908,1020,1004,1023,994,978, 962,946,930,914,1010] [views:debug,2014-08-19T16:48:28.877,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/908. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:28.877,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",908,active,0} [ns_server:debug,2014-08-19T16:48:29.044,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 906. Nacking mccouch update. [views:debug,2014-08-19T16:48:29.044,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/906. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.044,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",906,active,0} [ns_server:debug,2014-08-19T16:48:29.044,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,1016,1000,990,974,958,942,926,910,1022,1006,996,980,964, 948,932,916,1012,986,970,954,938,922,906,1018,1002,992,976,960,944,928,912, 1008,998,982,966,950,934,918,1014,988,972,956,940,924,908,1020,1004,1023,994, 978,962,946,930,914,1010] [views:debug,2014-08-19T16:48:29.102,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/906. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.103,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",906,active,0} [ns_server:debug,2014-08-19T16:48:29.269,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 904. Nacking mccouch update. [views:debug,2014-08-19T16:48:29.269,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/904. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.271,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",904,active,0} [ns_server:debug,2014-08-19T16:48:29.271,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,1022,1006,996,980, 964,948,932,916,1012,986,970,954,938,922,906,1018,1002,992,976,960,944,928, 912,1008,998,982,966,950,934,918,1014,988,972,956,940,924,908,1020,1004,1023, 994,978,962,946,930,914,1010] [views:debug,2014-08-19T16:48:29.353,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/904. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.353,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",904,active,0} [ns_server:debug,2014-08-19T16:48:29.520,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 902. Nacking mccouch update. [views:debug,2014-08-19T16:48:29.520,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/902. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.520,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",902,active,0} [ns_server:debug,2014-08-19T16:48:29.520,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,1022,1006,996,980, 964,948,932,916,1012,986,970,954,938,922,906,1018,1002,992,976,960,944,928, 912,1008,998,982,966,950,934,918,902,1014,988,972,956,940,924,908,1020,1004, 1023,994,978,962,946,930,914,1010] [views:debug,2014-08-19T16:48:29.604,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/902. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.604,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",902,active,0} [ns_server:debug,2014-08-19T16:48:29.763,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 900. Nacking mccouch update. [views:debug,2014-08-19T16:48:29.763,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/900. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.763,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",900,active,0} [ns_server:debug,2014-08-19T16:48:29.763,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,1022,1006,996,980, 964,948,932,916,900,1012,986,970,954,938,922,906,1018,1002,992,976,960,944, 928,912,1008,998,982,966,950,934,918,902,1014,988,972,956,940,924,908,1020, 1004,1023,994,978,962,946,930,914,1010] [views:debug,2014-08-19T16:48:29.817,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/900. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.817,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",900,active,0} [ns_server:debug,2014-08-19T16:48:29.884,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 898. Nacking mccouch update. [views:debug,2014-08-19T16:48:29.884,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/898. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.884,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",898,active,0} [ns_server:debug,2014-08-19T16:48:29.884,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,1022,1006,996,980, 964,948,932,916,900,1012,986,970,954,938,922,906,1018,1002,992,976,960,944, 928,912,1008,998,982,966,950,934,918,902,1014,988,972,956,940,924,908,1020, 1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:29.918,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/898. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:29.918,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",898,active,0} [ns_server:debug,2014-08-19T16:48:30.036,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 896. Nacking mccouch update. [views:debug,2014-08-19T16:48:30.036,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/896. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.036,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",896,active,0} [ns_server:debug,2014-08-19T16:48:30.036,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,1022,1006,996,980, 964,948,932,916,900,1012,986,970,954,938,922,906,1018,1002,992,976,960,944, 928,912,896,1008,998,982,966,950,934,918,902,1014,988,972,956,940,924,908, 1020,1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:30.095,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/896. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.095,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",896,active,0} [ns_server:debug,2014-08-19T16:48:30.212,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 894. Nacking mccouch update. [views:debug,2014-08-19T16:48:30.212,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/894. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.212,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",894,active,0} [ns_server:debug,2014-08-19T16:48:30.212,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,894,1022,1006,996, 980,964,948,932,916,900,1012,986,970,954,938,922,906,1018,1002,992,976,960, 944,928,912,896,1008,998,982,966,950,934,918,902,1014,988,972,956,940,924, 908,1020,1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:30.271,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/894. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.271,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",894,active,0} [ns_server:debug,2014-08-19T16:48:30.363,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 892. Nacking mccouch update. [views:debug,2014-08-19T16:48:30.363,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/892. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.363,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",892,active,0} [ns_server:debug,2014-08-19T16:48:30.363,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,894,1022,1006,996, 980,964,948,932,916,900,1012,986,970,954,938,922,906,1018,1002,992,976,960, 944,928,912,896,1008,998,982,966,950,934,918,902,1014,988,972,956,940,924, 908,892,1020,1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:30.396,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/892. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.396,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",892,active,0} [ns_server:debug,2014-08-19T16:48:30.563,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 890. Nacking mccouch update. [views:debug,2014-08-19T16:48:30.563,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/890. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.563,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",890,active,0} [ns_server:debug,2014-08-19T16:48:30.563,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,1016,1000,990,974,958,942,926,910,894,1022,1006,996, 980,964,948,932,916,900,1012,986,970,954,938,922,906,890,1018,1002,992,976, 960,944,928,912,896,1008,998,982,966,950,934,918,902,1014,988,972,956,940, 924,908,892,1020,1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:30.639,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/890. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.639,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",890,active,0} [ns_server:debug,2014-08-19T16:48:30.772,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 888. Nacking mccouch update. [views:debug,2014-08-19T16:48:30.772,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/888. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.772,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",888,active,0} [ns_server:debug,2014-08-19T16:48:30.772,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,1022,1006, 996,980,964,948,932,916,900,1012,986,970,954,938,922,906,890,1018,1002,992, 976,960,944,928,912,896,1008,998,982,966,950,934,918,902,1014,988,972,956, 940,924,908,892,1020,1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:30.839,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/888. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.840,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",888,active,0} [ns_server:debug,2014-08-19T16:48:30.964,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 886. Nacking mccouch update. [views:debug,2014-08-19T16:48:30.965,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/886. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:30.965,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",886,active,0} [ns_server:debug,2014-08-19T16:48:30.965,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,1022,1006, 996,980,964,948,932,916,900,1012,986,970,954,938,922,906,890,1018,1002,992, 976,960,944,928,912,896,1008,998,982,966,950,934,918,902,886,1014,988,972, 956,940,924,908,892,1020,1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:31.032,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/886. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.032,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",886,active,0} [ns_server:debug,2014-08-19T16:48:31.157,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 884. Nacking mccouch update. [views:debug,2014-08-19T16:48:31.157,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/884. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.157,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",884,active,0} [ns_server:debug,2014-08-19T16:48:31.157,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,1022,1006, 996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890,1018,1002, 992,976,960,944,928,912,896,1008,998,982,966,950,934,918,902,886,1014,988, 972,956,940,924,908,892,1020,1004,1023,994,978,962,946,930,914,898,1010] [views:debug,2014-08-19T16:48:31.208,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/884. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.208,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",884,active,0} [ns_server:debug,2014-08-19T16:48:31.330,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 882. Nacking mccouch update. [views:debug,2014-08-19T16:48:31.330,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/882. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.330,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",882,active,0} [ns_server:debug,2014-08-19T16:48:31.330,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,1022,1006, 996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890,1018,1002, 992,976,960,944,928,912,896,1008,998,982,966,950,934,918,902,886,1014,988, 972,956,940,924,908,892,1020,1004,1023,994,978,962,946,930,914,898,882,1010] [views:debug,2014-08-19T16:48:31.363,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/882. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.363,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",882,active,0} [ns_server:debug,2014-08-19T16:48:31.480,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 880. Nacking mccouch update. [views:debug,2014-08-19T16:48:31.481,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/880. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.481,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",880,active,0} [ns_server:debug,2014-08-19T16:48:31.481,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,1022,1006, 996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890,1018,1002, 992,976,960,944,928,912,896,880,1008,998,982,966,950,934,918,902,886,1014, 988,972,956,940,924,908,892,1020,1004,1023,994,978,962,946,930,914,898,882, 1010] [views:debug,2014-08-19T16:48:31.540,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/880. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.540,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",880,active,0} [ns_server:debug,2014-08-19T16:48:31.631,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 878. Nacking mccouch update. [views:debug,2014-08-19T16:48:31.631,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/878. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.631,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",878,active,0} [ns_server:debug,2014-08-19T16:48:31.632,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,878,1022, 1006,996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890,1018, 1002,992,976,960,944,928,912,896,880,1008,998,982,966,950,934,918,902,886, 1014,988,972,956,940,924,908,892,1020,1004,1023,994,978,962,946,930,914,898, 882,1010] [views:debug,2014-08-19T16:48:31.665,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/878. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.665,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",878,active,0} [ns_server:debug,2014-08-19T16:48:31.774,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 876. Nacking mccouch update. [views:debug,2014-08-19T16:48:31.774,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/876. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.774,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",876,active,0} [ns_server:debug,2014-08-19T16:48:31.774,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,878,1022, 1006,996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890,1018, 1002,992,976,960,944,928,912,896,880,1008,998,982,966,950,934,918,902,886, 1014,988,972,956,940,924,908,892,876,1020,1004,1023,994,978,962,946,930,914, 898,882,1010] [views:debug,2014-08-19T16:48:31.825,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/876. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.825,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",876,active,0} [ns_server:debug,2014-08-19T16:48:31.925,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 874. Nacking mccouch update. [views:debug,2014-08-19T16:48:31.925,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/874. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.925,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",874,active,0} [ns_server:debug,2014-08-19T16:48:31.925,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,1016,1000,990,974,958,942,926,910,894,878,1022, 1006,996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890,874, 1018,1002,992,976,960,944,928,912,896,880,1008,998,982,966,950,934,918,902, 886,1014,988,972,956,940,924,908,892,876,1020,1004,1023,994,978,962,946,930, 914,898,882,1010] [views:debug,2014-08-19T16:48:31.975,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/874. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:31.975,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",874,active,0} [ns_server:debug,2014-08-19T16:48:32.109,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 872. Nacking mccouch update. [views:debug,2014-08-19T16:48:32.110,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/872. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.110,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",872,active,0} [ns_server:debug,2014-08-19T16:48:32.110,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 1022,1006,996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890, 874,1018,1002,992,976,960,944,928,912,896,880,1008,998,982,966,950,934,918, 902,886,1014,988,972,956,940,924,908,892,876,1020,1004,1023,994,978,962,946, 930,914,898,882,1010] [views:debug,2014-08-19T16:48:32.160,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/872. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.160,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",872,active,0} [ns_server:debug,2014-08-19T16:48:32.294,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 870. Nacking mccouch update. [views:debug,2014-08-19T16:48:32.294,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/870. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.294,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",870,active,0} [ns_server:debug,2014-08-19T16:48:32.294,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 1022,1006,996,980,964,948,932,916,900,884,1012,986,970,954,938,922,906,890, 874,1018,1002,992,976,960,944,928,912,896,880,1008,998,982,966,950,934,918, 902,886,870,1014,988,972,956,940,924,908,892,876,1020,1004,1023,994,978,962, 946,930,914,898,882,1010] [views:debug,2014-08-19T16:48:32.345,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/870. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.345,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",870,active,0} [ns_server:debug,2014-08-19T16:48:32.411,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 868. Nacking mccouch update. [views:debug,2014-08-19T16:48:32.411,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/868. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.411,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",868,active,0} [ns_server:debug,2014-08-19T16:48:32.411,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 1022,1006,996,980,964,948,932,916,900,884,868,1012,986,970,954,938,922,906, 890,874,1018,1002,992,976,960,944,928,912,896,880,1008,998,982,966,950,934, 918,902,886,870,1014,988,972,956,940,924,908,892,876,1020,1004,1023,994,978, 962,946,930,914,898,882,1010] [views:debug,2014-08-19T16:48:32.445,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/868. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.445,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",868,active,0} [ns_server:debug,2014-08-19T16:48:32.512,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 866. Nacking mccouch update. [views:debug,2014-08-19T16:48:32.512,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/866. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.512,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",866,active,0} [ns_server:debug,2014-08-19T16:48:32.512,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 1022,1006,996,980,964,948,932,916,900,884,868,1012,986,970,954,938,922,906, 890,874,1018,1002,992,976,960,944,928,912,896,880,1008,998,982,966,950,934, 918,902,886,870,1014,988,972,956,940,924,908,892,876,1020,1004,1023,994,978, 962,946,930,914,898,882,866,1010] [views:debug,2014-08-19T16:48:32.546,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/866. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.546,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",866,active,0} [ns_server:debug,2014-08-19T16:48:32.667,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 864. Nacking mccouch update. [views:debug,2014-08-19T16:48:32.667,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/864. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.667,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",864,active,0} [ns_server:debug,2014-08-19T16:48:32.667,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 1022,1006,996,980,964,948,932,916,900,884,868,1012,986,970,954,938,922,906, 890,874,1018,1002,992,976,960,944,928,912,896,880,864,1008,998,982,966,950, 934,918,902,886,870,1014,988,972,956,940,924,908,892,876,1020,1004,1023,994, 978,962,946,930,914,898,882,866,1010] [views:debug,2014-08-19T16:48:32.750,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/864. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.751,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",864,active,0} [ns_server:debug,2014-08-19T16:48:32.909,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 862. Nacking mccouch update. [views:debug,2014-08-19T16:48:32.909,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/862. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.909,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",862,active,0} [ns_server:debug,2014-08-19T16:48:32.910,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 862,1022,1006,996,980,964,948,932,916,900,884,868,1012,986,970,954,938,922, 906,890,874,1018,1002,992,976,960,944,928,912,896,880,864,1008,998,982,966, 950,934,918,902,886,870,1014,988,972,956,940,924,908,892,876,1020,1004,1023, 994,978,962,946,930,914,898,882,866,1010] [views:debug,2014-08-19T16:48:32.993,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/862. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:32.993,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",862,active,0} [ns_server:debug,2014-08-19T16:48:33.152,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 860. Nacking mccouch update. [views:debug,2014-08-19T16:48:33.152,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/860. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.152,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",860,active,0} [ns_server:debug,2014-08-19T16:48:33.152,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 862,1022,1006,996,980,964,948,932,916,900,884,868,1012,986,970,954,938,922, 906,890,874,1018,1002,992,976,960,944,928,912,896,880,864,1008,998,982,966, 950,934,918,902,886,870,1014,988,972,956,940,924,908,892,876,860,1020,1004, 1023,994,978,962,946,930,914,898,882,866,1010] [views:debug,2014-08-19T16:48:33.236,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/860. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.236,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",860,active,0} [ns_server:debug,2014-08-19T16:48:33.412,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 858. Nacking mccouch update. [views:debug,2014-08-19T16:48:33.412,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/858. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.412,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",858,active,0} [ns_server:debug,2014-08-19T16:48:33.412,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,1016,1000,990,974,958,942,926,910,894,878, 862,1022,1006,996,980,964,948,932,916,900,884,868,1012,986,970,954,938,922, 906,890,874,858,1018,1002,992,976,960,944,928,912,896,880,864,1008,998,982, 966,950,934,918,902,886,870,1014,988,972,956,940,924,908,892,876,860,1020, 1004,1023,994,978,962,946,930,914,898,882,866,1010] [views:debug,2014-08-19T16:48:33.497,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/858. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.497,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",858,active,0} [ns_server:debug,2014-08-19T16:48:33.671,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 856. Nacking mccouch update. [views:debug,2014-08-19T16:48:33.671,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/856. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.671,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",856,active,0} [ns_server:debug,2014-08-19T16:48:33.672,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,968,952,936,920,904,888,872,856,1016,1000,990,974,958,942,926,910,894, 878,862,1022,1006,996,980,964,948,932,916,900,884,868,1012,986,970,954,938, 922,906,890,874,858,1018,1002,992,976,960,944,928,912,896,880,864,1008,998, 982,966,950,934,918,902,886,870,1014,988,972,956,940,924,908,892,876,860, 1020,1004,1023,994,978,962,946,930,914,898,882,866,1010] [views:debug,2014-08-19T16:48:33.738,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/856. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.738,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",856,active,0} [ns_server:debug,2014-08-19T16:48:33.805,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 854. Nacking mccouch update. [views:debug,2014-08-19T16:48:33.805,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/854. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.805,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",854,active,0} [ns_server:debug,2014-08-19T16:48:33.805,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,1022,1006,996, 980,964,948,932,916,900,884,868,1012,986,970,954,938,922,906,890,874,858, 1018,1002,992,976,960,944,928,912,896,880,864,1008,998,982,966,950,934,918, 902,886,870,854,1014,988,972,956,940,924,908,892,876,860,1020,1004,1023,994, 978,962,946,930,914,898,882,866,1010,968,936,904,872,1000] [views:debug,2014-08-19T16:48:33.838,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/854. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.838,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",854,active,0} [ns_server:debug,2014-08-19T16:48:33.906,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 852. Nacking mccouch update. [views:debug,2014-08-19T16:48:33.906,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/852. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.906,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",852,active,0} [ns_server:debug,2014-08-19T16:48:33.906,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,1022,1006,996, 980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874,858, 1018,1002,992,976,960,944,928,912,896,880,864,1008,998,982,966,950,934,918, 902,886,870,854,1014,988,972,956,940,924,908,892,876,860,1020,1004,1023,994, 978,962,946,930,914,898,882,866,1010,968,936,904,872,1000] [views:debug,2014-08-19T16:48:33.939,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/852. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:33.939,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",852,active,0} [ns_server:debug,2014-08-19T16:48:34.006,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 850. Nacking mccouch update. [views:debug,2014-08-19T16:48:34.006,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/850. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.007,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",850,active,0} [ns_server:debug,2014-08-19T16:48:34.007,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,1022,1006,996, 980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874,858, 1018,1002,992,976,960,944,928,912,896,880,864,1008,998,982,966,950,934,918, 902,886,870,854,1014,988,972,956,940,924,908,892,876,860,1020,1004,1023,994, 978,962,946,930,914,898,882,866,850,1010,968,936,904,872,1000] [views:debug,2014-08-19T16:48:34.040,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/850. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.040,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",850,active,0} [ns_server:debug,2014-08-19T16:48:34.107,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 848. Nacking mccouch update. [views:debug,2014-08-19T16:48:34.107,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/848. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.107,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",848,active,0} [ns_server:debug,2014-08-19T16:48:34.107,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,1022,1006,996, 980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874,858, 1018,1002,992,976,960,944,928,912,896,880,864,848,1008,998,982,966,950,934, 918,902,886,870,854,1014,988,972,956,940,924,908,892,876,860,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,1010,968,936,904,872,1000] [views:debug,2014-08-19T16:48:34.141,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/848. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.141,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",848,active,0} [ns_server:debug,2014-08-19T16:48:34.297,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 846. Nacking mccouch update. [views:debug,2014-08-19T16:48:34.297,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/846. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.297,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",846,active,0} [ns_server:debug,2014-08-19T16:48:34.297,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,846,1022,1006, 996,980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874, 858,1018,1002,992,976,960,944,928,912,896,880,864,848,1008,998,982,966,950, 934,918,902,886,870,854,1014,988,972,956,940,924,908,892,876,860,1020,1004, 1023,994,978,962,946,930,914,898,882,866,850,1010,968,936,904,872,1000] [views:debug,2014-08-19T16:48:34.381,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/846. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.381,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",846,active,0} [ns_server:debug,2014-08-19T16:48:34.539,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 844. Nacking mccouch update. [views:debug,2014-08-19T16:48:34.539,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/844. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.539,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",844,active,0} [ns_server:debug,2014-08-19T16:48:34.540,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,846,1022,1006, 996,980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874, 858,1018,1002,992,976,960,944,928,912,896,880,864,848,1008,998,982,966,950, 934,918,902,886,870,854,1014,988,972,956,940,924,908,892,876,860,844,1020, 1004,1023,994,978,962,946,930,914,898,882,866,850,1010,968,936,904,872,1000] [views:debug,2014-08-19T16:48:34.607,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/844. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.607,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",844,active,0} [ns_server:debug,2014-08-19T16:48:34.798,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 842. Nacking mccouch update. [views:debug,2014-08-19T16:48:34.798,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/842. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.798,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",842,active,0} [ns_server:debug,2014-08-19T16:48:34.799,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,846,1022,1006, 996,980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874, 858,842,1018,1002,992,976,960,944,928,912,896,880,864,848,1008,998,982,966, 950,934,918,902,886,870,854,1014,988,972,956,940,924,908,892,876,860,844, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,1010,968,936,904,872, 1000] [views:debug,2014-08-19T16:48:34.866,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/842. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:34.866,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",842,active,0} [ns_server:debug,2014-08-19T16:48:35.016,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 840. Nacking mccouch update. [views:debug,2014-08-19T16:48:35.016,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/840. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.016,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",840,active,0} [ns_server:debug,2014-08-19T16:48:35.016,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,846,1022,1006, 996,980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874, 858,842,1018,1002,992,976,960,944,928,912,896,880,864,848,1008,998,982,966, 950,934,918,902,886,870,854,1014,988,972,956,940,924,908,892,876,860,844, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,1010,968,936,904,872, 840,1000] [views:debug,2014-08-19T16:48:35.083,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/840. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.083,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",840,active,0} [ns_server:debug,2014-08-19T16:48:35.241,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 838. Nacking mccouch update. [views:debug,2014-08-19T16:48:35.241,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/838. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.241,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",838,active,0} [ns_server:debug,2014-08-19T16:48:35.241,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,846,1022,1006, 996,980,964,948,932,916,900,884,868,852,1012,986,970,954,938,922,906,890,874, 858,842,1018,1002,992,976,960,944,928,912,896,880,864,848,1008,998,982,966, 950,934,918,902,886,870,854,838,1014,988,972,956,940,924,908,892,876,860,844, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,1010,968,936,904,872, 840,1000] [views:debug,2014-08-19T16:48:35.275,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/838. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.275,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",838,active,0} [ns_server:debug,2014-08-19T16:48:35.342,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 836. Nacking mccouch update. [views:debug,2014-08-19T16:48:35.342,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/836. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.342,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",836,active,0} [ns_server:debug,2014-08-19T16:48:35.342,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,974,958,942,926,910,894,878,862,846,1022,1006, 996,980,964,948,932,916,900,884,868,852,836,1012,986,970,954,938,922,906,890, 874,858,842,1018,1002,992,976,960,944,928,912,896,880,864,848,1008,998,982, 966,950,934,918,902,886,870,854,838,1014,988,972,956,940,924,908,892,876,860, 844,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,1010,968,936,904, 872,840,1000] [views:debug,2014-08-19T16:48:35.375,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/836. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.376,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",836,active,0} [ns_server:debug,2014-08-19T16:48:35.444,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 834. Nacking mccouch update. [views:debug,2014-08-19T16:48:35.444,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/834. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.444,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",834,active,0} [ns_server:debug,2014-08-19T16:48:35.444,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,958,926,894,862,1022,996,980,964,948,932,916, 900,884,868,852,836,1012,986,970,954,938,922,906,890,874,858,842,1018,1002, 992,976,960,944,928,912,896,880,864,848,1008,998,982,966,950,934,918,902,886, 870,854,838,1014,988,972,956,940,924,908,892,876,860,844,1020,1004,1023,994, 978,962,946,930,914,898,882,866,850,834,1010,968,936,904,872,840,1000,974, 942,910,878,846,1006] [views:debug,2014-08-19T16:48:35.478,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/834. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.478,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",834,active,0} [ns_server:debug,2014-08-19T16:48:35.553,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 832. Nacking mccouch update. [views:debug,2014-08-19T16:48:35.553,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/832. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.553,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",832,active,0} [ns_server:debug,2014-08-19T16:48:35.553,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,958,926,894,862,1022,996,980,964,948,932,916, 900,884,868,852,836,1012,986,970,954,938,922,906,890,874,858,842,1018,1002, 992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966,950,934,918,902, 886,870,854,838,1014,988,972,956,940,924,908,892,876,860,844,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,1010,968,936,904,872,840,1000, 974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:35.587,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/832. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.587,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",832,active,0} [ns_server:debug,2014-08-19T16:48:35.662,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 830. Nacking mccouch update. [views:debug,2014-08-19T16:48:35.662,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/830. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.662,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",830,active,0} [ns_server:debug,2014-08-19T16:48:35.662,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,958,926,894,862,830,1022,996,980,964,948,932, 916,900,884,868,852,836,1012,986,970,954,938,922,906,890,874,858,842,1018, 1002,992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966,950,934, 918,902,886,870,854,838,1014,988,972,956,940,924,908,892,876,860,844,1020, 1004,1023,994,978,962,946,930,914,898,882,866,850,834,1010,968,936,904,872, 840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:35.696,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/830. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.696,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",830,active,0} [ns_server:debug,2014-08-19T16:48:35.850,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 828. Nacking mccouch update. [views:debug,2014-08-19T16:48:35.851,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/828. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.851,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",828,active,0} [ns_server:debug,2014-08-19T16:48:35.851,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,958,926,894,862,830,1022,996,980,964,948,932, 916,900,884,868,852,836,1012,986,970,954,938,922,906,890,874,858,842,1018, 1002,992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966,950,934, 918,902,886,870,854,838,1014,988,972,956,940,924,908,892,876,860,844,828, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,1010,968,936,904, 872,840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:35.918,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/828. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:35.918,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",828,active,0} [ns_server:debug,2014-08-19T16:48:36.068,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 826. Nacking mccouch update. [views:debug,2014-08-19T16:48:36.068,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/826. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.068,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",826,active,0} [ns_server:debug,2014-08-19T16:48:36.068,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,1016,990,958,926,894,862,830,1022,996,980,964,948,932, 916,900,884,868,852,836,1012,986,970,954,938,922,906,890,874,858,842,826, 1018,1002,992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966,950, 934,918,902,886,870,854,838,1014,988,972,956,940,924,908,892,876,860,844,828, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,1010,968,936,904, 872,840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:36.152,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/826. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.152,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",826,active,0} [ns_server:debug,2014-08-19T16:48:36.310,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 824. Nacking mccouch update. [views:debug,2014-08-19T16:48:36.310,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/824. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.310,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",824,active,0} [ns_server:debug,2014-08-19T16:48:36.311,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,980,964,948, 932,916,900,884,868,852,836,1012,986,970,954,938,922,906,890,874,858,842,826, 1018,1002,992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966,950, 934,918,902,886,870,854,838,1014,988,972,956,940,924,908,892,876,860,844,828, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,1010,968,936,904, 872,840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:36.394,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/824. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.394,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",824,active,0} [ns_server:debug,2014-08-19T16:48:36.561,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 822. Nacking mccouch update. [views:debug,2014-08-19T16:48:36.561,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/822. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.561,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",822,active,0} [ns_server:debug,2014-08-19T16:48:36.562,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,980,964,948, 932,916,900,884,868,852,836,1012,986,970,954,938,922,906,890,874,858,842,826, 1018,1002,992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966,950, 934,918,902,886,870,854,838,822,1014,988,972,956,940,924,908,892,876,860,844, 828,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,1010,968,936, 904,872,840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:36.645,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/822. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.645,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",822,active,0} [ns_server:debug,2014-08-19T16:48:36.796,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 820. Nacking mccouch update. [views:debug,2014-08-19T16:48:36.796,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/820. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.796,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",820,active,0} [ns_server:debug,2014-08-19T16:48:36.796,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,980,964,948, 932,916,900,884,868,852,836,820,1012,986,970,954,938,922,906,890,874,858,842, 826,1018,1002,992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966, 950,934,918,902,886,870,854,838,822,1014,988,972,956,940,924,908,892,876,860, 844,828,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,1010,968, 936,904,872,840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:36.829,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/820. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.830,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",820,active,0} [ns_server:debug,2014-08-19T16:48:36.896,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 818. Nacking mccouch update. [views:debug,2014-08-19T16:48:36.896,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/818. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.897,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",818,active,0} [ns_server:debug,2014-08-19T16:48:36.897,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,980,964,948, 932,916,900,884,868,852,836,820,1012,986,970,954,938,922,906,890,874,858,842, 826,1018,1002,992,976,960,944,928,912,896,880,864,848,832,1008,998,982,966, 950,934,918,902,886,870,854,838,822,1014,988,972,956,940,924,908,892,876,860, 844,828,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,1010, 968,936,904,872,840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:36.930,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/818. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.931,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",818,active,0} [ns_server:debug,2014-08-19T16:48:36.997,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 816. Nacking mccouch update. [views:debug,2014-08-19T16:48:36.997,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/816. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:36.997,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",816,active,0} [ns_server:debug,2014-08-19T16:48:36.997,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,980,964,948, 932,916,900,884,868,852,836,820,1012,986,970,954,938,922,906,890,874,858,842, 826,1018,1002,992,976,960,944,928,912,896,880,864,848,832,816,1008,998,982, 966,950,934,918,902,886,870,854,838,822,1014,988,972,956,940,924,908,892,876, 860,844,828,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818, 1010,968,936,904,872,840,1000,974,942,910,878,846,1006] [views:debug,2014-08-19T16:48:37.031,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/816. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.031,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",816,active,0} [ns_server:debug,2014-08-19T16:48:37.098,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 814. Nacking mccouch update. [views:debug,2014-08-19T16:48:37.098,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/814. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.098,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",814,active,0} [ns_server:debug,2014-08-19T16:48:37.098,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,986,970,954,938,922,906,890,874,858,842,826,1018,1002,992,976,960, 944,928,912,896,880,864,848,832,816,1008,998,982,966,950,934,918,902,886,870, 854,838,822,1014,988,972,956,940,924,908,892,876,860,844,828,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,1010,968,936,904,872,840, 1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,1012] [views:debug,2014-08-19T16:48:37.132,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/814. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.132,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",814,active,0} [ns_server:debug,2014-08-19T16:48:37.199,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 812. Nacking mccouch update. [views:debug,2014-08-19T16:48:37.199,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/812. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.199,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",812,active,0} [ns_server:debug,2014-08-19T16:48:37.199,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,986,970,954,938,922,906,890,874,858,842,826,1018,1002,992,976,960, 944,928,912,896,880,864,848,832,816,1008,998,982,966,950,934,918,902,886,870, 854,838,822,1014,988,972,956,940,924,908,892,876,860,844,828,812,1020,1004, 1023,994,978,962,946,930,914,898,882,866,850,834,818,1010,968,936,904,872, 840,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,1012] [views:debug,2014-08-19T16:48:37.247,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/812. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.247,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",812,active,0} [ns_server:debug,2014-08-19T16:48:37.430,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 810. Nacking mccouch update. [views:debug,2014-08-19T16:48:37.431,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/810. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.431,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",810,active,0} [ns_server:debug,2014-08-19T16:48:37.431,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002,992,976, 960,944,928,912,896,880,864,848,832,816,1008,998,982,966,950,934,918,902,886, 870,854,838,822,1014,988,972,956,940,924,908,892,876,860,844,828,812,1020, 1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,1010,968,936,904, 872,840,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,1012] [views:debug,2014-08-19T16:48:37.514,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/810. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.514,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",810,active,0} [ns_server:debug,2014-08-19T16:48:37.689,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 808. Nacking mccouch update. [views:debug,2014-08-19T16:48:37.690,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/808. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.690,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",808,active,0} [ns_server:debug,2014-08-19T16:48:37.690,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002,992,976, 960,944,928,912,896,880,864,848,832,816,1008,998,982,966,950,934,918,902,886, 870,854,838,822,1014,988,972,956,940,924,908,892,876,860,844,828,812,1020, 1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,1010,968,936,904, 872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,1012] [views:debug,2014-08-19T16:48:37.773,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/808. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.773,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",808,active,0} [ns_server:debug,2014-08-19T16:48:37.949,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 806. Nacking mccouch update. [views:debug,2014-08-19T16:48:37.949,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/806. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:37.949,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",806,active,0} [ns_server:debug,2014-08-19T16:48:37.949,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002,992,976, 960,944,928,912,896,880,864,848,832,816,1008,998,982,966,950,934,918,902,886, 870,854,838,822,806,1014,988,972,956,940,924,908,892,876,860,844,828,812, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,1010,968,936, 904,872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820, 1012] [views:debug,2014-08-19T16:48:38.033,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/806. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.033,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",806,active,0} [ns_server:debug,2014-08-19T16:48:38.208,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 804. Nacking mccouch update. [views:debug,2014-08-19T16:48:38.208,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/804. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.208,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",804,active,0} [ns_server:debug,2014-08-19T16:48:38.208,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,804,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002,992, 976,960,944,928,912,896,880,864,848,832,816,1008,998,982,966,950,934,918,902, 886,870,854,838,822,806,1014,988,972,956,940,924,908,892,876,860,844,828,812, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,1010,968,936, 904,872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820, 1012] [views:debug,2014-08-19T16:48:38.292,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/804. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.292,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",804,active,0} [ns_server:debug,2014-08-19T16:48:38.416,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 802. Nacking mccouch update. [views:debug,2014-08-19T16:48:38.416,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/802. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.416,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",802,active,0} [ns_server:debug,2014-08-19T16:48:38.416,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,804,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002,992, 976,960,944,928,912,896,880,864,848,832,816,1008,998,982,966,950,934,918,902, 886,870,854,838,822,806,1014,988,972,956,940,924,908,892,876,860,844,828,812, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,1010,968, 936,904,872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852, 820,1012] [views:debug,2014-08-19T16:48:38.450,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/802. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.450,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",802,active,0} [ns_server:debug,2014-08-19T16:48:38.517,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 800. Nacking mccouch update. [views:debug,2014-08-19T16:48:38.517,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/800. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.517,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",800,active,0} [ns_server:debug,2014-08-19T16:48:38.517,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,1022,996,964,932,900, 868,836,804,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002,992, 976,960,944,928,912,896,880,864,848,832,816,800,1008,998,982,966,950,934,918, 902,886,870,854,838,822,806,1014,988,972,956,940,924,908,892,876,860,844,828, 812,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,1010, 968,936,904,872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884, 852,820,1012] [views:debug,2014-08-19T16:48:38.550,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/800. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.550,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",800,active,0} [ns_server:debug,2014-08-19T16:48:38.617,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 798. Nacking mccouch update. [views:debug,2014-08-19T16:48:38.617,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/798. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.617,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",798,active,0} [ns_server:debug,2014-08-19T16:48:38.618,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,798,1022,996,964,932, 900,868,836,804,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002, 992,976,960,944,928,912,896,880,864,848,832,816,800,1008,998,982,966,950,934, 918,902,886,870,854,838,822,806,1014,988,972,956,940,924,908,892,876,860,844, 828,812,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802, 1010,968,936,904,872,840,808,1000,974,942,910,878,846,814,1006,980,948,916, 884,852,820,1012] [views:debug,2014-08-19T16:48:38.652,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/798. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.652,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",798,active,0} [ns_server:debug,2014-08-19T16:48:38.720,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 796. Nacking mccouch update. [views:debug,2014-08-19T16:48:38.720,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/796. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.720,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",796,active,0} [ns_server:debug,2014-08-19T16:48:38.720,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,798,1022,996,964,932, 900,868,836,804,986,970,954,938,922,906,890,874,858,842,826,810,1018,1002, 992,976,960,944,928,912,896,880,864,848,832,816,800,1008,998,982,966,950,934, 918,902,886,870,854,838,822,806,1014,988,972,956,940,924,908,892,876,860,844, 828,812,796,1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818, 802,1010,968,936,904,872,840,808,1000,974,942,910,878,846,814,1006,980,948, 916,884,852,820,1012] [views:debug,2014-08-19T16:48:38.754,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/796. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.754,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",796,active,0} [ns_server:debug,2014-08-19T16:48:38.820,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 794. Nacking mccouch update. [views:debug,2014-08-19T16:48:38.820,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/794. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.820,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",794,active,0} [ns_server:debug,2014-08-19T16:48:38.821,ns_1@127.0.0.1:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,1016,990,958,926,894,862,830,798,1022,996,964,932, 900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896,880, 864,848,832,816,800,1008,998,982,966,950,934,918,902,886,870,854,838,822,806, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,1020,1004,1023,994, 978,962,946,930,914,898,882,866,850,834,818,802,1010,968,936,904,872,840,808, 1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,1012,986,954,922, 890,858,826,794,1018] [views:debug,2014-08-19T16:48:38.892,ns_1@127.0.0.1:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/794. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:38.892,ns_1@127.0.0.1:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",794,active,0} [cluster:debug,2014-08-19T16:48:39.051,ns_1@127.0.0.1:ns_cluster<0.17894.0>:ns_cluster:handle_call:153]handling add_node("10.242.238.89", 8091, undefined, ..) [cluster:info,2014-08-19T16:48:39.051,ns_1@127.0.0.1:ns_cluster<0.17894.0>:ns_cluster:do_change_address:398]Decided to change address to "10.242.238.88" [user:warn,2014-08-19T16:48:39.052,nonode@nohost:ns_node_disco<0.17920.0>:ns_node_disco:handle_info:165]Node nonode@nohost saw that node 'ns_1@127.0.0.1' went down. 
Details: [{nodedown_reason, net_kernel_terminated}] [error_logger:info,2014-08-19T16:48:39.052,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.20965.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [ns_server:info,2014-08-19T16:48:39.053,nonode@nohost:dist_manager<0.268.0>:dist_manager:do_adjust_address:249]Adjusted IP to "10.242.238.88" [ns_server:debug,2014-08-19T16:48:39.052,ns_1@127.0.0.1:<0.18040.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.18038.0>} exited with reason noconnection [ns_server:info,2014-08-19T16:48:39.053,nonode@nohost:dist_manager<0.268.0>:dist_manager:bringup:230]Attempting to bring up net_kernel with name 'ns_1@10.242.238.88' [error_logger:info,2014-08-19T16:48:39.053,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.20968.0>}, {name,erl_epmd}, {mfargs,{erl_epmd,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:39.053,nonode@nohost:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.20969.0>}, {name,auth}, {mfargs,{auth,start_link,[]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:39.054,ns_1@10.242.238.88:<0.19195.0>:capi_set_view_manager:nodeup_monitoring_loop:176]got nodeup event. Considering ddocs replication [user:info,2014-08-19T16:48:39.054,ns_1@10.242.238.88:ns_node_disco<0.17920.0>:ns_node_disco:handle_info:159]Node 'ns_1@10.242.238.88' saw that node 'ns_1@10.242.238.88' came up. Tags: [] [ns_server:debug,2014-08-19T16:48:39.054,ns_1@10.242.238.88:<0.18065.0>:xdc_rdoc_replication_srv:nodeup_monitoring_loop:46]got nodeup event. 
Considering rdocs replication [ns_server:debug,2014-08-19T16:48:39.054,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:48:39.054,ns_1@10.242.238.88:dist_manager<0.268.0>:dist_manager:save_node:143]saving node to "/opt/couchbase/var/lib/couchbase/couchbase-server.node" [ns_server:debug,2014-08-19T16:48:39.054,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [error_logger:info,2014-08-19T16:48:39.054,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,net_sup} started: [{pid,<0.20970.0>}, {name,net_kernel}, {mfargs, {net_kernel,start_link, [['ns_1@10.242.238.88',longnames]]}}, {restart_type,permanent}, {shutdown,2000}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:39.055,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,kernel_sup} started: [{pid,<0.20967.0>}, {name,net_sup_dynamic}, {mfargs, {erl_distribution,start_link, [['ns_1@10.242.238.88',longnames]]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,supervisor}] [ns_server:warn,2014-08-19T16:48:39.056,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:150]Remote server node {xdc_rdoc_replication_srv,'ns_1@127.0.0.1'} process down: noconnection [ns_server:warn,2014-08-19T16:48:39.056,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:355]Remote server node {'capi_ddoc_replication_srv-default','ns_1@127.0.0.1'} process down: noconnection [ns_server:debug,2014-08-19T16:48:39.067,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 792. Nacking mccouch update. [views:debug,2014-08-19T16:48:39.067,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/792. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.068,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",792,active,0} [ns_server:debug,2014-08-19T16:48:39.068,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896, 880,864,848,832,816,800,1008,998,982,966,950,934,918,902,886,870,854,838,822, 806,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,1010,968,936,904,872,840, 808,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,1012,986,954, 922,890,858,826,794,1018] [ns_server:debug,2014-08-19T16:48:39.079,ns_1@10.242.238.88:dist_manager<0.268.0>:dist_manager:bringup:238]Attempted to save node name to disk: ok [ns_server:info,2014-08-19T16:48:39.079,ns_1@10.242.238.88:dist_manager<0.268.0>:dist_manager:do_adjust_address:253]Re-setting cookie {xyzevwdfypcplvpp,'ns_1@10.242.238.88'} [ns_server:info,2014-08-19T16:48:39.079,ns_1@10.242.238.88:dist_manager<0.268.0>:dist_manager:save_address_config:138]Deleting irrelevant ip file "/opt/couchbase/var/lib/couchbase/ip_start": ok [ns_server:info,2014-08-19T16:48:39.079,ns_1@10.242.238.88:dist_manager<0.268.0>:dist_manager:save_address_config:139]saving ip config to "/opt/couchbase/var/lib/couchbase/ip" [ns_server:info,2014-08-19T16:48:39.104,ns_1@10.242.238.88:dist_manager<0.268.0>:dist_manager:do_adjust_address:260]Persisted the address successfully [cluster:debug,2014-08-19T16:48:39.104,ns_1@10.242.238.88:<0.20963.0>:ns_cluster:maybe_rename:431]Renaming node from 'ns_1@127.0.0.1' to 'ns_1@10.242.238.88'. 
[cluster:debug,2014-08-19T16:48:39.106,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf buckets -> buckets: [{configs,[{"default", [{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@127.0.0.1']}, {map,[['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
                        ... (long run of identical chains condensed: each entry in this map is ['ns_1@127.0.0.1',undefined], i.e. the active copy on 'ns_1@127.0.0.1' with no replica assigned) ...
                        ['ns_1@127.0.0.1',undefined]]},
                  {map_opts_hash,133465355}]}]}] ->
[{configs,[{"default",
            [{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>},
             {sasl_password,"*****"},
             {num_replicas,1},
             {replica_index,false},
             {ram_quota,13369344000},
             {auth_type,sasl},
             {flush_enabled,true},
             {num_threads,3},
             {type,membase},
             {num_vbuckets,1024},
             {servers,['ns_1@10.242.238.88']},
             {map,[['ns_1@10.242.238.88',undefined],
                        ... (long run of identical chains condensed: each of the 1024 vbucket chains in this map is ['ns_1@10.242.238.88',undefined], the same map as above with the node renamed) ...
                        ['ns_1@10.242.238.88',undefined]]},
                  {map_opts_hash,133465355}]}]}]
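The rename entries in this stretch of the log (the bucket config that ends just above, and vbucket_map_history below) record the same transformation: when the node is renamed from 'ns_1@127.0.0.1' to 'ns_1@10.242.238.88', ns_cluster:rename_node_in_config rewrites each config value so that every occurrence of the old node name becomes the new one, while everything else (uuid binaries, quotas, map_opts_hash) is left untouched. Below is a minimal sketch of that kind of deep term rewrite, assuming a recursive walk over lists and tuples; the module and function names are illustrative and this is not the actual ns_server implementation.

%% Illustrative sketch only (not the ns_server code): recursively replace one
%% node atom with another anywhere inside a nested config term, e.g. a vbucket
%% map shaped like [['ns_1@127.0.0.1',undefined], ...].
-module(rename_node_sketch).
-export([rename_node/3]).

%% Exact match on the old node name: substitute the new one.
rename_node(OldNode, NewNode, OldNode) ->
    NewNode;
%% Walk lists element by element (covers maps, chains, server lists).
rename_node(OldNode, NewNode, List) when is_list(List) ->
    [rename_node(OldNode, NewNode, Elem) || Elem <- List];
%% Walk tuples (covers proplist pairs such as {servers, [...]}).
rename_node(OldNode, NewNode, Tuple) when is_tuple(Tuple) ->
    list_to_tuple(rename_node(OldNode, NewNode, tuple_to_list(Tuple)));
%% Anything else (binaries, numbers, other atoms) is left unchanged.
rename_node(_OldNode, _NewNode, Other) ->
    Other.

For example, rename_node('ns_1@127.0.0.1', 'ns_1@10.242.238.88', OldBucketConfig) would yield a value shaped like the right-hand side of the bucket entry above, with {servers,['ns_1@10.242.238.88']} and every map chain rewritten, while the uuid binary and numeric settings pass through unchanged.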
[cluster:debug,2014-08-19T16:48:39.126,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf vbucket_map_history -> vbucket_map_history:
[{[['ns_1@127.0.0.1',undefined],
    ... (long run of identical chains condensed: each entry in this stretch of the history map is ['ns_1@127.0.0.1',undefined]; the dump continues below) ...
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], 
['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined], ['ns_1@127.0.0.1',undefined]], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] -> [{[['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined]], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] [cluster:debug,2014-08-19T16:48:39.142,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf nodes_wanted -> nodes_wanted: ['ns_1@127.0.0.1'] -> ['ns_1@10.242.238.88'] [cluster:debug,2014-08-19T16:48:39.142,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf server_groups -> server_groups: [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@127.0.0.1']}]] -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@10.242.238.88']}]] [cluster:debug,2014-08-19T16:48:39.142,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',capi_port} -> {node, 'ns_1@10.242.238.88', capi_port}: 8092 -> 8092 [cluster:debug,2014-08-19T16:48:39.142,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',compaction_daemon} -> {node, 'ns_1@10.242.238.88', compaction_daemon}: [{check_interval,30},{min_file_size,131072}] -> [{check_interval,30},{min_file_size,131072}] [cluster:debug,2014-08-19T16:48:39.142,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',config_version} -> {node, 'ns_1@10.242.238.88', config_version}: {2,3,0} -> {2,3,0} [cluster:debug,2014-08-19T16:48:39.142,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',isasl} -> {node, 'ns_1@10.242.238.88', isasl}: [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [cluster:debug,2014-08-19T16:48:39.143,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',membership} -> {node, 'ns_1@10.242.238.88', membership}: active -> active [views:debug,2014-08-19T16:48:39.143,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/792. 
Updated state: active (0) [cluster:debug,2014-08-19T16:48:39.143,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',memcached} -> {node, 'ns_1@10.242.238.88', memcached}: [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:48:39.143,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",792,active,0} [cluster:debug,2014-08-19T16:48:39.143,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',moxi} -> {node, 'ns_1@10.242.238.88',moxi}: [{port,11211},{verbosity,[]}] -> [{port,11211},{verbosity,[]}] [cluster:debug,2014-08-19T16:48:39.144,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ns_log} -> {node, 'ns_1@10.242.238.88', ns_log}: [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [cluster:debug,2014-08-19T16:48:39.144,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',port_servers} -> {node, 'ns_1@10.242.238.88', port_servers}: [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", 
{"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [cluster:debug,2014-08-19T16:48:39.145,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',rest} -> {node, 'ns_1@10.242.238.88',rest}: [{port,8091},{port_meta,global}] -> [{port,8091},{port_meta,global}] [cluster:debug,2014-08-19T16:48:39.145,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_capi_port} -> {node, 'ns_1@10.242.238.88', ssl_capi_port}: 18092 -> 18092 [cluster:debug,2014-08-19T16:48:39.145,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_proxy_downstream_port} -> {node, 'ns_1@10.242.238.88', ssl_proxy_downstream_port}: 11214 -> 11214 [cluster:debug,2014-08-19T16:48:39.145,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_proxy_upstream_port} -> {node, 'ns_1@10.242.238.88', ssl_proxy_upstream_port}: 11215 -> 11215 [cluster:debug,2014-08-19T16:48:39.145,ns_1@10.242.238.88:ns_config<0.17898.0>:ns_cluster:rename_node_in_config:443]renaming node conf {node,'ns_1@127.0.0.1',ssl_rest_port} -> {node, 'ns_1@10.242.238.88', ssl_rest_port}: 18091 -> 18091 [ns_server:debug,2014-08-19T16:48:39.145,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.146,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:48:39.146,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: 
{node,'ns_1@10.242.238.88',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:48:39.146,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:48:39.146,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:48:39.147,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.147,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets,nodes_wanted,server_groups, vbucket_map_history, {node,'ns_1@10.242.238.88',capi_port}, {node,'ns_1@10.242.238.88',compaction_daemon}, {node,'ns_1@10.242.238.88',config_version}, {node,'ns_1@10.242.238.88',isasl}, {node,'ns_1@10.242.238.88',membership}, {node,'ns_1@10.242.238.88',memcached}, {node,'ns_1@10.242.238.88',moxi}, {node,'ns_1@10.242.238.88',ns_log}, {node,'ns_1@10.242.238.88',port_servers}, {node,'ns_1@10.242.238.88',rest}, {node,'ns_1@10.242.238.88',ssl_capi_port}, {node,'ns_1@10.242.238.88', ssl_proxy_downstream_port}, {node,'ns_1@10.242.238.88', ssl_proxy_upstream_port}, {node,'ns_1@10.242.238.88',ssl_rest_port}]..) [ns_server:debug,2014-08-19T16:48:39.147,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:48:39.147,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.147,ns_1@10.242.238.88:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:48:39.147,ns_1@10.242.238.88:mb_master<0.17956.0>:mb_master:update_peers:506]List of peers has changed from ['ns_1@127.0.0.1'] to ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:48:39.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.148,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:48:39.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.148,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, 
{"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:48:39.148,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:48:39.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.149,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:48:39.149,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:48:39.149,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.149,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',membership} -> active [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] 
[ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>},{name,<<"Group 1">>},{nodes,['ns_1@10.242.238.88']}]] [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:48:39.150,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.151,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.151,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.151,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.151,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.152,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.151,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [{[['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] [ns_server:debug,2014-08-19T16:48:39.152,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.153,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_rep_events:handle_event:42]Detected a new nodes (['ns_1@10.242.238.88']). Moving config around. 
[ns_server:debug,2014-08-19T16:48:39.154,ns_1@10.242.238.88:<0.17946.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17945.0>} exited with reason shutdown [ns_server:info,2014-08-19T16:48:39.154,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@10.242.238.88'] [ns_server:info,2014-08-19T16:48:39.154,ns_1@10.242.238.88:mb_master<0.17956.0>:mb_master:terminate:299]Synchronously shutting down child mb_master_sup [error_logger:info,2014-08-19T16:48:39.154,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.20988.0>}, {name,ns_doctor}, {mfa,{ns_doctor,start_link,[]}}, {restart_type,permanent}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:39.154,ns_1@10.242.238.88:<0.17957.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.17956.0>} exited with reason shutdown [ns_server:debug,2014-08-19T16:48:39.154,ns_1@10.242.238.88:ns_server_sup<0.17906.0>:mb_master:check_master_takeover_needed:141]Sending master node question to the following nodes: [] [ns_server:debug,2014-08-19T16:48:39.154,ns_1@10.242.238.88:ns_server_sup<0.17906.0>:mb_master:check_master_takeover_needed:143]Got replies: [] [ns_server:debug,2014-08-19T16:48:39.155,ns_1@10.242.238.88:ns_server_sup<0.17906.0>:mb_master:check_master_takeover_needed:149]Was unable to discover master, not going to force mastership takeover [user:info,2014-08-19T16:48:39.155,ns_1@10.242.238.88:mb_master<0.20995.0>:mb_master:init:86]I'm the only node, so I'm the master. [ns_server:info,2014-08-19T16:48:39.155,ns_1@10.242.238.88:ns_log<0.17910.0>:ns_log:handle_cast:183]suppressing duplicate log mb_master:undefined([<<"I'm the only node, so I'm the master.">>]) because it's been seen 1 times in the past 42.782865 secs (last seen 42.782865 secs ago [ns_server:debug,2014-08-19T16:48:39.155,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.155,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{0, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {1, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {2, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {3, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {4, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {5, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {6, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {7, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {8, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {9, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {10, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {11, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {12, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {13, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {14, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {15, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {16, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {17, 
['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {18, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {19, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {20, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {21, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {22, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {23, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {24, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {25, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {26, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {27, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {28, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {29, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {30, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {31, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {32, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {33, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {34, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {35, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {36, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {37, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {38, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {39, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {40, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {41, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {42, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {43, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {44, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {45, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {46, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {47, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {48, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {49, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {50, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {51, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {52, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {53, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {54, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {55, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {56, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {57, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {58, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {59, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {60, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {61, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {62, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {63, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {64, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {65, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {66, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {67, ['ns_1@127.0.0.1',undefined], 
['ns_1@10.242.238.88',undefined]}, {68, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {69, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {70, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {71, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {72, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {73, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {74, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {75, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {76, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {77, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {78, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {79, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {80, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {81, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {82, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {83, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {84, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88',undefined]}, {85, ['ns_1@127.0.0.1',undefined], ['ns_1@10.242.238.88'|...]}, {86,['ns_1@127.0.0.1'|...],[...]}, {87,[...],...}, {88,...}, {...}|...]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:48:39.156,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: dynamic_config_version -> [2,5] [ns_server:debug,2014-08-19T16:48:39.156,ns_1@10.242.238.88:mb_master_sup<0.20998.0>:misc:start_singleton:986]start_singleton(gen_fsm, ns_orchestrator, [], []): started as <0.20999.0> on 'ns_1@10.242.238.88' [error_logger:info,2014-08-19T16:48:39.156,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.20999.0>}, {name,ns_orchestrator}, {mfargs,{ns_orchestrator,start_link,[]}}, {restart_type,permanent}, {shutdown,20}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:39.156,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.156,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([dynamic_config_version]..) 
[ns_server:debug,2014-08-19T16:48:39.156,ns_1@10.242.238.88:mb_master_sup<0.20998.0>:misc:start_singleton:986]start_singleton(gen_server, ns_tick, [], []): started as <0.21000.0> on 'ns_1@10.242.238.88' [error_logger:info,2014-08-19T16:48:39.157,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.21000.0>}, {name,ns_tick}, {mfargs,{ns_tick,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [ns_server:info,2014-08-19T16:48:39.157,ns_1@10.242.238.88:ns_doctor<0.20988.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.88': ["default"] [ns_server:debug,2014-08-19T16:48:39.157,ns_1@10.242.238.88:<0.21002.0>:auto_failover:init:134]init auto_failover. [ns_server:debug,2014-08-19T16:48:39.157,ns_1@10.242.238.88:mb_master_sup<0.20998.0>:misc:start_singleton:986]start_singleton(gen_server, auto_failover, [], []): started as <0.21002.0> on 'ns_1@10.242.238.88' [cluster:info,2014-08-19T16:48:39.158,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_change_address:404]Renamed node. New name is 'ns_1@10.242.238.88'. [error_logger:info,2014-08-19T16:48:39.158,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,mb_master_sup} started: [{pid,<0.21002.0>}, {name,auto_failover}, {mfargs,{auto_failover,start_link,[]}}, {restart_type,permanent}, {shutdown,10}, {child_type,worker}] [error_logger:info,2014-08-19T16:48:39.161,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.20995.0>}, {name,mb_master}, {mfa,{mb_master,start_link,[]}}, {restart_type,permanent}, {shutdown,infinity}, {child_type,supervisor}] [cluster:debug,2014-08-19T16:48:39.161,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_with_connectivity:505]Posting node info to engage_cluster on {"10.242.238.89",8091}: {struct, [{<<"requestedTargetNodeHostname">>,<<"10.242.238.89">>}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,103212320}, {usagePercent,3}]}, {struct, [{path,<<"/dev/shm">>}, {sizeKBytes,49515824}, {usagePercent,0}]}, {struct, [{path,<<"/boot">>}, {sizeKBytes,198337}, {usagePercent,17}]}, {struct, [{path,<<"/data">>}, {sizeKBytes,329573012}, {usagePercent,1}]}, {struct, [{path,<<"/test">>}, {sizeKBytes,528447160}, {usagePercent,1}]}, {struct, [{path,<<"/var/lib/pgsql">>}, {sizeKBytes,1922866992}, {usagePercent,1}]}]}]}}, {memoryQuota,90112}, {storageTotals, {struct, [{ram, {struct, [{total,101408407552}, {quotaTotal,94489280512}, {quotaUsed,13369344000}, {used,13122936832}, {usedByData,31832760}]}}, {hdd, {struct, [{total,1969015799808}, {quotaTotal,1969015799808}, {used,19690157998}, {usedByData,1343173}, {free,1949325641810}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/var/lib/pgsql">>}, {index_path,<<"/var/lib/pgsql">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct, [{cpu_utilization_rate,0.4592901878914405}, {swap_total,0}, {swap_used,0}, {mem_total,101408407552}, {mem_free,89881563136}]}}, {interestingStats, {struct, [{cmd_get,0.0}, {couch_docs_actual_disk_size,1343173}, {couch_docs_data_size,1336214}, {couch_views_actual_disk_size,0}, {couch_views_data_size,0}, {curr_items,0}, 
{curr_items_tot,0}, {ep_bg_fetched,0.0}, {get_hits,0.0}, {mem_used,31832760}, {ops,0.0}, {vb_replica_curr_items,0}]}}, {uptime,<<"4066">>}, {memoryTotal,101408407552}, {memoryFree,89881563136}, {mcdMemoryReserved,77368}, {mcdMemoryAllocated,77368}, {couchApiBase,<<"http://10.242.238.88:8092/">>}, {otpCookie,<<"xyzevwdfypcplvpp">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {otpNode,<<"ns_1@10.242.238.88">>}, {thisNode,true}, {hostname,<<"10.242.238.88:8091">>}, {clusterCompatibility,131077}, {version,<<"2.5.1-1083-rel-enterprise">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports, {struct, [{httpsMgmt,18091}, {httpsCAPI,18092}, {sslProxy,11214}, {proxy,11211}, {direct,11210}]}}]} [ns_server:debug,2014-08-19T16:48:39.206,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:48:39.207,ns_1@10.242.238.88:<0.20984.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:48:39.207,ns_1@10.242.238.88:<0.20984.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:48:39.293,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 790. Nacking mccouch update. [views:debug,2014-08-19T16:48:39.293,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/790. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.294,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",790,active,0} [ns_server:debug,2014-08-19T16:48:39.295,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896, 880,864,848,832,816,800,1008,998,982,966,950,934,918,902,886,870,854,838,822, 806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,1020,1004, 1023,994,978,962,946,930,914,898,882,866,850,834,818,802,1010,968,936,904, 872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,1012, 986,954,922,890,858,826,794,1018] [views:debug,2014-08-19T16:48:39.378,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/790. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.378,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",790,active,0} [cluster:debug,2014-08-19T16:48:39.419,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_with_connectivity:512]Reply from engage_cluster on {"10.242.238.89",8091}: {ok,{struct,[{<<"availableStorage">>, {struct,[{<<"hdd">>, [{struct,[{<<"path">>,<<"/">>}, {<<"sizeKBytes">>,103212320}, {<<"usagePercent">>,3}]}, {struct,[{<<"path">>,<<"/dev/shm">>}, {<<"sizeKBytes">>,49515824}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"/boot">>}, {<<"sizeKBytes">>,198337}, {<<"usagePercent">>,17}]}, {struct,[{<<"path">>,<<"/data">>}, {<<"sizeKBytes">>,329573012}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/test">>}, {<<"sizeKBytes">>,528447160}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"sizeKBytes">>,1922866992}, {<<"usagePercent">>,1}]}]}]}}, {<<"memoryQuota">>,58026}, {<<"storageTotals">>, {struct,[{<<"ram">>, {struct,[{<<"total">>,101408407552}, {<<"quotaTotal">>,60844670976}, {<<"quotaUsed">>,0}, {<<"used">>,12109447168}, {<<"usedByData">>,0}]}}, {<<"hdd">>, {struct,[{<<"total">>,1969015799808}, {<<"quotaTotal">>,1969015799808}, {<<"used">>,19690157998}, {<<"usedByData">>,0}, {<<"free">>,1949325641810}]}}]}}, {<<"storage">>, {struct,[{<<"ssd">>,[]}, {<<"hdd">>, [{struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"index_path">>,<<"/var/lib/pgsql">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"systemStats">>, {struct,[{<<"cpu_utilization_rate">>,0.1666666666666667}, {<<"swap_total">>,0}, {<<"swap_used">>,0}, {<<"mem_total">>,101408407552}, {<<"mem_free">>,90952019968}]}}, {<<"interestingStats">>,{struct,[]}}, {<<"uptime">>,<<"3731">>}, {<<"memoryTotal">>,101408407552}, {<<"memoryFree">>,90952019968}, {<<"mcdMemoryReserved">>,77368}, {<<"mcdMemoryAllocated">>,77368}, {<<"couchApiBase">>,<<"http://10.242.238.89:8092/">>}, {<<"otpCookie">>,<<"vlfoxubkwseyuphh">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"otpNode">>,<<"ns_1@10.242.238.89">>}, {<<"thisNode">>,true}, {<<"hostname">>,<<"10.242.238.89:8091">>}, {<<"clusterCompatibility">>,131077}, {<<"version">>,<<"2.5.1-1083-rel-enterprise">>}, {<<"os">>,<<"x86_64-unknown-linux-gnu">>}, {<<"ports">>, {struct,[{<<"httpsMgmt">>,18091}, {<<"httpsCAPI">>,18092}, {<<"sslProxy">>,11214}, {<<"proxy">>,11211}, {<<"direct">>,11210}]}}]}} [cluster:debug,2014-08-19T16:48:39.421,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:verify_otp_connectivity:578]port_please("ns_1", "10.242.238.89") = 21101 [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>}, {name,<<"Group 1">>}, {nodes,['ns_1@10.242.238.88','ns_1@10.242.238.89']}]] [cluster:info,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:node_add_transaction_finish:727]Started node add transaction by adding node 'ns_1@10.242.238.89' to nodes_wanted (group: undefined) 
[ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:mb_master<0.20995.0>:mb_master:update_peers:506]List of peers has changed from ['ns_1@10.242.238.88'] to ['ns_1@10.242.238.88', 'ns_1@10.242.238.89'] [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([nodes_wanted,server_groups, {node,'ns_1@10.242.238.89',membership}]..) [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',membership} -> inactiveAdded [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.88','ns_1@10.242.238.89'] [ns_server:debug,2014-08-19T16:48:39.423,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:48:39.425,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.425,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [cluster:debug,2014-08-19T16:48:39.425,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_engaged_inner:649]Posting the following to complete_join on "10.242.238.89:8091": {struct, [{<<"targetNode">>,'ns_1@10.242.238.89'}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,103212320}, {usagePercent,3}]}, {struct, [{path,<<"/dev/shm">>}, {sizeKBytes,49515824}, {usagePercent,0}]}, {struct, [{path,<<"/boot">>}, {sizeKBytes,198337}, {usagePercent,17}]}, {struct, [{path,<<"/data">>}, {sizeKBytes,329573012}, {usagePercent,1}]}, {struct, [{path,<<"/test">>}, {sizeKBytes,528447160}, {usagePercent,1}]}, {struct, [{path,<<"/var/lib/pgsql">>}, {sizeKBytes,1922866992}, {usagePercent,1}]}]}]}}, {memoryQuota,90112}, {storageTotals, {struct, [{ram, {struct, [{total,101408407552}, {quotaTotal,94489280512}, {quotaUsed,13369344000}, {used,13122936832}, {usedByData,31832760}]}}, {hdd, {struct, [{total,1969015799808}, {quotaTotal,1969015799808}, {used,19690157998}, {usedByData,1343173}, {free,1949325641810}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/var/lib/pgsql">>}, {index_path,<<"/var/lib/pgsql">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct, [{cpu_utilization_rate,0.4592901878914405}, {swap_total,0}, {swap_used,0}, {mem_total,101408407552}, {mem_free,89881563136}]}}, {interestingStats, {struct, [{cmd_get,0.0}, {couch_docs_actual_disk_size,1343173}, {couch_docs_data_size,1336214}, {couch_views_actual_disk_size,0}, {couch_views_data_size,0}, {curr_items,0}, {curr_items_tot,0}, {ep_bg_fetched,0.0}, {get_hits,0.0}, {mem_used,31832760}, {ops,0.0}, {vb_replica_curr_items,0}]}}, {uptime,<<"4066">>}, {memoryTotal,101408407552}, {memoryFree,89881563136}, {mcdMemoryReserved,77368}, {mcdMemoryAllocated,77368}, {couchApiBase,<<"http://10.242.238.88:8092/">>}, {otpCookie,<<"xyzevwdfypcplvpp">>}, 
{clusterMembership,<<"active">>}, {status,<<"healthy">>}, {otpNode,<<"ns_1@10.242.238.88">>}, {thisNode,true}, {hostname,<<"10.242.238.88:8091">>}, {clusterCompatibility,131077}, {version,<<"2.5.1-1083-rel-enterprise">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports, {struct, [{httpsMgmt,18091}, {httpsCAPI,18092}, {sslProxy,11214}, {proxy,11211}, {direct,11210}]}}]} [ns_server:debug,2014-08-19T16:48:39.483,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:48:39.483,ns_1@10.242.238.88:<0.21031.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:48:39.485,ns_1@10.242.238.88:<0.21031.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:48:39.569,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 788. Nacking mccouch update. [views:debug,2014-08-19T16:48:39.569,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/788. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.569,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",788,active,0} [ns_server:debug,2014-08-19T16:48:39.569,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896, 880,864,848,832,816,800,1008,998,982,966,950,934,918,902,886,870,854,838,822, 806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,1020,1004, 1023,994,978,962,946,930,914,898,882,866,850,834,818,802,1010,968,936,904, 872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820,788, 1012,986,954,922,890,858,826,794,1018] [views:debug,2014-08-19T16:48:39.645,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/788. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",788,active,0} [ns_server:debug,2014-08-19T16:48:39.776,ns_1@10.242.238.88:<0.18065.0>:xdc_rdoc_replication_srv:nodeup_monitoring_loop:46]got nodeup event. Considering rdocs replication [ns_server:debug,2014-08-19T16:48:39.776,ns_1@10.242.238.88:<0.19195.0>:capi_set_view_manager:nodeup_monitoring_loop:176]got nodeup event. Considering ddocs replication [user:info,2014-08-19T16:48:39.776,ns_1@10.242.238.88:ns_node_disco<0.17920.0>:ns_node_disco:handle_info:159]Node 'ns_1@10.242.238.88' saw that node 'ns_1@10.242.238.89' came up. 
Tags: [] [ns_server:debug,2014-08-19T16:48:39.776,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.776,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.777,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_rep_events:handle_event:42]Detected a new nodes (['ns_1@10.242.238.89']). Moving config around. [ns_server:info,2014-08-19T16:48:39.777,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@10.242.238.88','ns_1@10.242.238.89'] [ns_server:warn,2014-08-19T16:48:39.777,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:150]Remote server node {xdc_rdoc_replication_srv,'ns_1@10.242.238.89'} process down: noproc [ns_server:debug,2014-08-19T16:48:39.811,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 786. Nacking mccouch update. [views:debug,2014-08-19T16:48:39.812,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/786. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.812,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",786,active,0} [ns_server:debug,2014-08-19T16:48:39.812,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896, 880,864,848,832,816,800,1008,998,982,966,950,934,918,902,886,870,854,838,822, 806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,1020,1004, 1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,1010,968,936, 904,872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852,820, 788,1012,986,954,922,890,858,826,794,1018] [ns_server:debug,2014-08-19T16:48:39.842,ns_1@10.242.238.88:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T16:48:39.842,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',capi_port} -> 8092 
[ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.843,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:48:39.844,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:48:39.844,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:48:39.844,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.844,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.844,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:48:39.844,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.844,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:48:39.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.845,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:48:39.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:48:39.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.846,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:48:39.846,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:48:39.846,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:48:39.846,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:48:39.846,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:48:39.846,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:48:39.846,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:48:39.891,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:48:39.891,ns_1@10.242.238.88:<0.21068.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89'], with cookie: xyzevwdfypcplvpp 
[ns_server:debug,2014-08-19T16:48:39.892,ns_1@10.242.238.88:<0.21068.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89'], with cookie: xyzevwdfypcplvpp [views:debug,2014-08-19T16:48:39.896,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/786. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.897,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",786,active,0} [ns_server:debug,2014-08-19T16:48:39.974,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [cluster:debug,2014-08-19T16:48:39.977,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_engaged_inner:656]Reply from complete_join on "10.242.238.89:8091": {ok,[]} [cluster:debug,2014-08-19T16:48:39.978,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:handle_call:155]add_node("10.242.238.89", 8091, undefined, ..) -> {ok,'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:48:39.996,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 784. Nacking mccouch update. [views:debug,2014-08-19T16:48:39.996,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/784. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:39.996,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",784,active,0} [ns_server:debug,2014-08-19T16:48:39.996,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896, 880,864,848,832,816,800,784,1008,998,982,966,950,934,918,902,886,870,854,838, 822,806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,1020, 1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,1010,968, 936,904,872,840,808,1000,974,942,910,878,846,814,1006,980,948,916,884,852, 820,788,1012,986,954,922,890,858,826,794,1018] [views:debug,2014-08-19T16:48:40.030,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/784. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.030,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",784,active,0} [error_logger:error,2014-08-19T16:48:40.053,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: ns_log:-start_link_crash_consumer/0-fun-0-/0 pid: <0.17911.0> registered_name: [] exception exit: {{nodedown,'babysitter_of_ns_1@127.0.0.1'}, {gen_server,call, [{ns_crash_log,'babysitter_of_ns_1@127.0.0.1'}, consume,infinity]}} in function gen_server:call/3 in call from ns_log:crash_consumption_loop/0 in call from misc:delaying_crash/2 ancestors: [ns_server_sup,ns_server_cluster_sup,<0.58.0>] messages: [{#Ref<0.0.0.187383>,superseded}, {#Ref<0.0.0.188377>,superseded}] links: [<0.17906.0>] dictionary: [] trap_exit: false status: running heap_size: 6765 stack_size: 24 reductions: 3239 neighbours: [error_logger:info,2014-08-19T16:48:40.053,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.21088.0>}, {name,ns_crash_log_consumer}, {mfa,{ns_log,start_link_crash_consumer,[]}}, {restart_type,{permanent,4}}, {shutdown,1000}, {child_type,worker}] [ns_server:debug,2014-08-19T16:48:40.054,ns_1@10.242.238.88:<0.20966.0>:ns_pubsub:do_subscribe_link:136]Parent process of subscription {ns_config_events,<0.20965.0>} exited with reason {badmatch, false} [error_logger:error,2014-08-19T16:48:40.054,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================CRASH REPORT========================= crasher: initial call: ns_ports_setup:setup_body_tramp/0 pid: <0.20965.0> registered_name: ns_ports_setup exception error: no match of right hand side value false in function ns_ports_setup:dynamic_children/0 in call from ns_ports_setup:setup_body/0 in call from misc:delaying_crash/2 ancestors: [ns_server_sup,ns_server_cluster_sup,<0.58.0>] messages: [check_childs_update,check_childs_update,check_childs_update, check_childs_update,check_childs_update,check_childs_update, check_childs_update,check_childs_update,check_childs_update, check_childs_update,check_childs_update,check_childs_update, check_childs_update,check_childs_update,check_childs_update, check_childs_update,check_childs_update,check_childs_update, check_childs_update,check_childs_update,check_childs_update, check_childs_update,check_childs_update] links: [<0.17906.0>,<0.20966.0>] dictionary: [] trap_exit: false status: running heap_size: 28657 stack_size: 24 reductions: 2046 neighbours: [error_logger:info,2014-08-19T16:48:40.055,ns_1@10.242.238.88:error_logger<0.6.0>:ale_error_logger_handler:log_report:115] =========================PROGRESS REPORT========================= supervisor: {local,ns_server_sup} started: [{pid,<0.21091.0>}, {name,ns_ports_setup}, {mfa,{ns_ports_setup,start,[]}}, {restart_type,{permanent,4}}, {shutdown,brutal_kill}, {child_type,worker}] [user:info,2014-08-19T16:48:40.059,ns_1@10.242.238.88:<0.21088.0>:ns_log:crash_consumption_loop:64]Port server moxi on node 'babysitter_of_ns_1@127.0.0.1' exited with status 0. Restarting. 
Messages: 2014-08-19 16:48:17: (cproxy_config.c.315) env: MOXI_SASL_PLAIN_USR (13) 2014-08-19 16:48:17: (cproxy_config.c.324) env: MOXI_SASL_PLAIN_PWD (12) 2014-08-19 16:48:19: (agent_config.c.703) ERROR: bad JSON configuration from http://127.0.0.1:8091/pools/default/saslBucketsStreaming: Number of vBuckets must be a power of two > 0 and <= 65536 EOL on stdin. Exiting [ns_server:debug,2014-08-19T16:48:40.096,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 782. Nacking mccouch update. [views:debug,2014-08-19T16:48:40.096,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/782. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.097,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",782,active,0} [ns_server:debug,2014-08-19T16:48:40.097,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896, 880,864,848,832,816,800,784,1008,998,982,966,950,934,918,902,886,870,854,838, 822,806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,1020, 1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,1010,968, 936,904,872,840,808,1000,974,942,910,878,846,814,782,1006,980,948,916,884, 852,820,788,1012,986,954,922,890,858,826,794,1018] [views:debug,2014-08-19T16:48:40.131,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/782. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.131,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",782,active,0} [ns_server:debug,2014-08-19T16:48:40.197,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 780. Nacking mccouch update. [views:debug,2014-08-19T16:48:40.197,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/780. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.197,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",780,active,0} [ns_server:debug,2014-08-19T16:48:40.198,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,1002,992,976,960,944,928,912,896, 880,864,848,832,816,800,784,1008,998,982,966,950,934,918,902,886,870,854,838, 822,806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,1010, 968,936,904,872,840,808,1000,974,942,910,878,846,814,782,1006,980,948,916, 884,852,820,788,1012,986,954,922,890,858,826,794,1018] [views:debug,2014-08-19T16:48:40.231,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/780. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.232,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",780,active,0} [ns_server:debug,2014-08-19T16:48:40.298,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 778. Nacking mccouch update. [views:debug,2014-08-19T16:48:40.298,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/778. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.298,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",778,active,0} [ns_server:debug,2014-08-19T16:48:40.299,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,778,1002,992,976,960,944,928,912, 896,880,864,848,832,816,800,784,1008,998,982,966,950,934,918,902,886,870,854, 838,822,806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,1010, 968,936,904,872,840,808,1000,974,942,910,878,846,814,782,1006,980,948,916, 884,852,820,788,1012,986,954,922,890,858,826,794,1018] [views:debug,2014-08-19T16:48:40.332,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/778. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.332,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",778,active,0} [ns_server:debug,2014-08-19T16:48:40.412,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 776. Nacking mccouch update. [views:debug,2014-08-19T16:48:40.412,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/776. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.412,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",776,active,0} [ns_server:debug,2014-08-19T16:48:40.413,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,778,1002,992,976,960,944,928,912, 896,880,864,848,832,816,800,784,1008,998,982,966,950,934,918,902,886,870,854, 838,822,806,790,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780, 1020,1004,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,1010, 968,936,904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948, 916,884,852,820,788,1012,986,954,922,890,858,826,794,1018] [views:debug,2014-08-19T16:48:40.497,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/776. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.497,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",776,active,0} [ns_server:debug,2014-08-19T16:48:40.664,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 774. Nacking mccouch update. [views:debug,2014-08-19T16:48:40.664,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/774. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.664,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",774,active,0} [ns_server:debug,2014-08-19T16:48:40.665,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,970,938,906,874,842,810,778,1002,976,944,912,880,848,816, 784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774,1014, 988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,1023,994, 978,962,946,930,914,898,882,866,850,834,818,802,786,1010,968,936,904,872,840, 808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852,820,788, 1012,986,954,922,890,858,826,794,1018,992,960,928,896,864,832,800] [views:debug,2014-08-19T16:48:40.749,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/774. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.749,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",774,active,0} [ns_server:debug,2014-08-19T16:48:40.915,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 772. Nacking mccouch update. [views:debug,2014-08-19T16:48:40.915,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/772. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.915,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",772,active,0} [ns_server:debug,2014-08-19T16:48:40.915,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880,848, 816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,1010,968,936,904,872, 840,808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852,820, 788,1012,986,954,922,890,858,826,794,1018,992,960,928,896,864,832,800] [views:debug,2014-08-19T16:48:40.999,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/772. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:40.999,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",772,active,0} [ns_server:debug,2014-08-19T16:48:41.174,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 770. Nacking mccouch update. [views:debug,2014-08-19T16:48:41.174,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/770. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.174,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",770,active,0} [ns_server:debug,2014-08-19T16:48:41.174,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880,848, 816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,968,936,904, 872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852, 820,788,1012,986,954,922,890,858,826,794,1018,992,960,928,896,864,832,800] [views:debug,2014-08-19T16:48:41.259,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/770. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.259,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",770,active,0} [ns_server:debug,2014-08-19T16:48:41.433,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 768. Nacking mccouch update. [views:debug,2014-08-19T16:48:41.433,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/768. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.434,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",768,active,0} [ns_server:debug,2014-08-19T16:48:41.434,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880,848, 816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,968,936,904, 872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852, 820,788,1012,986,954,922,890,858,826,794,1018,992,960,928,896,864,832,800, 768] [views:debug,2014-08-19T16:48:41.516,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/768. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.516,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",768,active,0} [ns_server:debug,2014-08-19T16:48:41.591,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 766. Nacking mccouch update. [views:debug,2014-08-19T16:48:41.591,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.591,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",766,active,0} [ns_server:debug,2014-08-19T16:48:41.592,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880,848, 816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936, 904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884, 852,820,788,1012,986,954,922,890,858,826,794,1018,992,960,928,896,864,832, 800,768] [views:debug,2014-08-19T16:48:41.625,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/766. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.625,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",766,active,0} [ns_server:debug,2014-08-19T16:48:41.692,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 764. Nacking mccouch update. [views:debug,2014-08-19T16:48:41.692,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/764. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.692,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",764,active,0} [ns_server:debug,2014-08-19T16:48:41.692,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,996,964, 932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880,848, 816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936, 904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884, 852,820,788,1012,986,954,922,890,858,826,794,1018,992,960,928,896,864,832, 800,768,764] [views:debug,2014-08-19T16:48:41.726,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/764. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.727,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",764,active,0} [ns_server:debug,2014-08-19T16:48:41.792,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 762. Nacking mccouch update. [views:debug,2014-08-19T16:48:41.793,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/762. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.793,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",762,active,0} [ns_server:debug,2014-08-19T16:48:41.793,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,762,996, 964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880, 848,816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936, 904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884, 852,820,788,1012,986,954,922,890,858,826,794,1018,992,960,928,896,864,832, 800,768,764] [views:debug,2014-08-19T16:48:41.826,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/762. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.827,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",762,active,0} [ns_server:debug,2014-08-19T16:48:41.893,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 760. Nacking mccouch update. [views:debug,2014-08-19T16:48:41.893,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/760. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.893,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",760,active,0} [ns_server:debug,2014-08-19T16:48:41.894,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,762,996, 964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880, 848,816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760, 1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766, 968,936,904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948, 916,884,852,820,788,1012,986,954,922,890,858,826,794,1018,992,960,928,896, 864,832,800,768,764] [views:debug,2014-08-19T16:48:41.927,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/760. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:41.927,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",760,active,0} [ns_server:debug,2014-08-19T16:48:42.075,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 758. Nacking mccouch update. [views:debug,2014-08-19T16:48:42.075,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/758. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.075,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",758,active,0} [ns_server:debug,2014-08-19T16:48:42.076,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,990,958,926,894,862,830,798,1022,762,996, 964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912,880, 848,816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790,774, 1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760, 1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766, 968,936,904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,980,948, 916,884,852,820,788,1012,986,954,922,890,858,826,794,1018,758,992,960,928, 896,864,832,800,768,764] [views:debug,2014-08-19T16:48:42.142,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/758. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.143,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",758,active,0} [ns_server:debug,2014-08-19T16:48:42.293,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 756. Nacking mccouch update. [views:debug,2014-08-19T16:48:42.293,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/756. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.293,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",756,active,0} [ns_server:debug,2014-08-19T16:48:42.293,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022,762, 996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912, 880,848,816,784,1008,998,982,966,950,934,918,902,886,870,854,838,822,806,790, 774,1014,988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004, 760,1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010, 766,968,936,904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,980, 948,916,884,852,820,788,1012,986,954,922,890,858,826,794,1018,758,992,960, 928,896,864,832,800,768,764] [views:debug,2014-08-19T16:48:42.327,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/756. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.327,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",756,active,0} [ns_server:debug,2014-08-19T16:48:42.410,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 754. Nacking mccouch update. [views:debug,2014-08-19T16:48:42.410,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/754. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.410,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",754,active,0} [ns_server:debug,2014-08-19T16:48:42.411,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022,762, 996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912, 880,848,816,784,1008,982,950,918,886,854,822,790,1014,754,988,972,956,940, 924,908,892,876,860,844,828,812,796,780,1020,1004,760,1023,994,978,962,946, 930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936,904,872,840,808, 776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852,820,788,1012, 986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832,800,768,764,998, 966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:42.461,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/754. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",754,active,0} [ns_server:debug,2014-08-19T16:48:42.528,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 752. Nacking mccouch update. [views:debug,2014-08-19T16:48:42.528,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/752. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.528,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",752,active,0} [ns_server:debug,2014-08-19T16:48:42.528,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022,762, 996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944,912, 880,848,816,784,1008,982,950,918,886,854,822,790,1014,754,988,972,956,940, 924,908,892,876,860,844,828,812,796,780,1020,1004,760,1023,994,978,962,946, 930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936,904,872,840,808, 776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852,820,788,1012, 752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832,800,768,764, 998,966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:42.562,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/752. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.562,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",752,active,0} [ns_server:debug,2014-08-19T16:48:42.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 750. Nacking mccouch update. [views:debug,2014-08-19T16:48:42.645,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/750. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",750,active,0} [ns_server:debug,2014-08-19T16:48:42.646,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944, 912,880,848,816,784,1008,982,950,918,886,854,822,790,1014,754,988,972,956, 940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,1023,994,978,962, 946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936,904,872,840, 808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852,820,788, 1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832,800, 768,764,998,966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:42.736,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/750. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.736,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",750,active,0} [ns_server:debug,2014-08-19T16:48:42.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 748. Nacking mccouch update. [views:debug,2014-08-19T16:48:42.904,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/748. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",748,active,0} [ns_server:debug,2014-08-19T16:48:42.905,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944, 912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754,988,972, 956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,1023,994,978, 962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936,904,872, 840,808,776,1000,974,942,910,878,846,814,782,1006,980,948,916,884,852,820, 788,1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832, 800,768,764,998,966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:42.988,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/748. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:42.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",748,active,0} [ns_server:debug,2014-08-19T16:48:43.155,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 746. Nacking mccouch update. [views:debug,2014-08-19T16:48:43.155,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/746. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.155,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",746,active,0} [ns_server:debug,2014-08-19T16:48:43.155,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944, 912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754,988,972, 956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,1023,994,978, 962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936,904,872, 840,808,776,1000,974,942,910,878,846,814,782,1006,746,980,948,916,884,852, 820,788,1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864, 832,800,768,764,998,966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:43.239,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/746. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.239,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",746,active,0} [ns_server:debug,2014-08-19T16:48:43.414,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 744. Nacking mccouch update. [views:debug,2014-08-19T16:48:43.414,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/744. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.414,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",744,active,0} [ns_server:debug,2014-08-19T16:48:43.415,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,976,944, 912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754,988,972, 956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,744,1023,994, 978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936,904, 872,840,808,776,1000,974,942,910,878,846,814,782,1006,746,980,948,916,884, 852,820,788,1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896, 864,832,800,768,764,998,966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:43.474,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/744. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",744,active,0} [ns_server:debug,2014-08-19T16:48:43.632,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 742. Nacking mccouch update. [views:debug,2014-08-19T16:48:43.632,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/742. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.632,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",742,active,0} [ns_server:debug,2014-08-19T16:48:43.632,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,742,976, 944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754,988, 972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,744,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936, 904,872,840,808,776,1000,974,942,910,878,846,814,782,1006,746,980,948,916, 884,852,820,788,1012,752,986,954,922,890,858,826,794,1018,758,992,960,928, 896,864,832,800,768,764,998,966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:43.682,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/742. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.682,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",742,active,0} [ns_server:debug,2014-08-19T16:48:43.771,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 740. Nacking mccouch update. [views:debug,2014-08-19T16:48:43.771,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/740. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.771,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",740,active,0} [ns_server:debug,2014-08-19T16:48:43.771,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,742,976, 944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754,988, 972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,744,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936, 904,872,840,808,776,1000,740,974,942,910,878,846,814,782,1006,746,980,948, 916,884,852,820,788,1012,752,986,954,922,890,858,826,794,1018,758,992,960, 928,896,864,832,800,768,764,998,966,934,902,870,838,806,774] [views:debug,2014-08-19T16:48:43.830,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/740. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.830,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",740,active,0} [ns_server:debug,2014-08-19T16:48:43.946,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 738. Nacking mccouch update. [views:debug,2014-08-19T16:48:43.947,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/738. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:43.947,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",738,active,0} [ns_server:debug,2014-08-19T16:48:43.947,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,970,938,906,874,842,810,778,1002,742,976, 944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754,988, 972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,744,1023, 994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766,968,936, 904,872,840,808,776,1000,740,974,942,910,878,846,814,782,1006,746,980,948, 916,884,852,820,788,1012,752,986,954,922,890,858,826,794,1018,758,992,960, 928,896,864,832,800,768,764,998,966,934,902,870,838,806,774,738] [views:debug,2014-08-19T16:48:44.005,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/738. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.005,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",738,active,0} [ns_server:debug,2014-08-19T16:48:44.073,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 736. Nacking mccouch update. [views:debug,2014-08-19T16:48:44.074,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/736. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.074,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",736,active,0} [ns_server:debug,2014-08-19T16:48:44.074,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778,1002,742, 976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754, 988,972,956,940,924,908,892,876,860,844,828,812,796,780,1020,1004,760,744, 1023,994,978,962,946,930,914,898,882,866,850,834,818,802,786,770,1010,766, 968,936,904,872,840,808,776,1000,740,974,942,910,878,846,814,782,1006,746, 980,948,916,884,852,820,788,1012,752,986,954,922,890,858,826,794,1018,758, 992,960,928,896,864,832,800,768,764,998,966,934,902,870,838,806,774,738] [views:debug,2014-08-19T16:48:44.107,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/736. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.107,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",736,active,0} [ns_server:debug,2014-08-19T16:48:44.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 734. Nacking mccouch update. [views:debug,2014-08-19T16:48:44.199,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/734. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",734,active,0} [ns_server:debug,2014-08-19T16:48:44.200,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778,1002,742, 976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754, 988,956,924,892,860,828,796,1020,760,744,1023,994,978,962,946,930,914,898, 882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808,776, 1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820,788, 1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832,800, 768,764,998,966,934,902,870,838,806,774,738,972,940,908,876,844,812,780,1004] [views:debug,2014-08-19T16:48:44.259,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/734. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.259,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",734,active,0} [ns_server:debug,2014-08-19T16:48:44.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 732. Nacking mccouch update. [views:debug,2014-08-19T16:48:44.349,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/732. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",732,active,0} [ns_server:debug,2014-08-19T16:48:44.349,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778,1002,742, 976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014,754, 988,956,924,892,860,828,796,1020,760,744,1023,994,978,962,946,930,914,898, 882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808,776, 1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820,788, 1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832,800, 768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876,844,812,780, 1004] [views:debug,2014-08-19T16:48:44.391,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/732. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.391,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",732,active,0} [ns_server:debug,2014-08-19T16:48:44.516,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 730. Nacking mccouch update. [views:debug,2014-08-19T16:48:44.516,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/730. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.516,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",730,active,0} [ns_server:debug,2014-08-19T16:48:44.517,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778,1002, 742,976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014, 754,988,956,924,892,860,828,796,1020,760,744,1023,994,978,962,946,930,914, 898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808,776, 1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820,788, 1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832,800, 768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876,844,812,780, 1004] [views:debug,2014-08-19T16:48:44.550,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/730. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.550,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",730,active,0} [ns_server:debug,2014-08-19T16:48:44.675,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 728. Nacking mccouch update. [views:debug,2014-08-19T16:48:44.675,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/728. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.675,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",728,active,0} [ns_server:debug,2014-08-19T16:48:44.676,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778,1002, 742,976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014, 754,988,956,924,892,860,828,796,1020,760,744,728,1023,994,978,962,946,930, 914,898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808, 776,1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820, 788,1012,752,986,954,922,890,858,826,794,1018,758,992,960,928,896,864,832, 800,768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876,844,812, 780,1004] [views:debug,2014-08-19T16:48:44.709,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/728. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.709,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",728,active,0} [ns_server:debug,2014-08-19T16:48:44.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 726. Nacking mccouch update. [views:debug,2014-08-19T16:48:44.834,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/726. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.835,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",726,active,0} [ns_server:debug,2014-08-19T16:48:44.835,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,990,958,926,894,862,830,798,1022, 762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778,1002, 742,976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790,1014, 754,988,956,924,892,860,828,796,1020,760,744,728,1023,994,978,962,946,930, 914,898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808, 776,1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820, 788,1012,752,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864, 832,800,768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876,844, 812,780,1004] [views:debug,2014-08-19T16:48:44.868,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/726. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:44.868,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",726,active,0} [ns_server:debug,2014-08-19T16:48:45.018,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 724. Nacking mccouch update. [views:debug,2014-08-19T16:48:45.019,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/724. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.019,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",724,active,0} [ns_server:debug,2014-08-19T16:48:45.019,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790, 1014,754,988,956,924,892,860,828,796,1020,760,728,994,978,962,946,930,914, 898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808,776, 1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820,788, 1012,752,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864,832, 800,768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876,844,812, 780,1004,744,1023] [views:debug,2014-08-19T16:48:45.069,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/724. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.069,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",724,active,0} [ns_server:debug,2014-08-19T16:48:45.159,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 722. Nacking mccouch update. [views:debug,2014-08-19T16:48:45.159,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/722. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.159,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",722,active,0} [ns_server:debug,2014-08-19T16:48:45.159,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790, 1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,978,962,946,930, 914,898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808, 776,1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820, 788,1012,752,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864, 832,800,768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876,844, 812,780,1004,744,1023] [views:debug,2014-08-19T16:48:45.218,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/722. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",722,active,0} [ns_server:debug,2014-08-19T16:48:45.285,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 720. Nacking mccouch update. [views:debug,2014-08-19T16:48:45.285,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/720. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.285,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",720,active,0} [ns_server:debug,2014-08-19T16:48:45.285,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790, 1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,978,962,946,930, 914,898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808, 776,1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820, 788,1012,752,720,986,954,922,890,858,826,794,1018,758,726,992,960,928,896, 864,832,800,768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876, 844,812,780,1004,744,1023] [views:debug,2014-08-19T16:48:45.319,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/720. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.319,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",720,active,0} [ns_server:debug,2014-08-19T16:48:45.386,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 718. Nacking mccouch update. [views:debug,2014-08-19T16:48:45.386,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/718. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.386,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",718,active,0} [ns_server:debug,2014-08-19T16:48:45.386,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,976,944,912,880,848,816,784,1008,748,982,950,918,886,854,822,790, 1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,978,962,946,930, 914,898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840,808, 776,1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852,820, 788,1012,752,720,986,954,922,890,858,826,794,1018,758,726,992,960,928,896, 864,832,800,768,764,732,998,966,934,902,870,838,806,774,738,972,940,908,876, 844,812,780,1004,744,1023] [views:debug,2014-08-19T16:48:45.420,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/718. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.421,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",718,active,0} [ns_server:debug,2014-08-19T16:48:45.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 716. Nacking mccouch update. [views:debug,2014-08-19T16:48:45.486,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/716. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.487,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",716,active,0} [ns_server:debug,2014-08-19T16:48:45.487,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822, 790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,978,962,946, 930,914,898,882,866,850,834,818,802,786,770,1010,766,734,968,936,904,872,840, 808,776,1000,740,974,942,910,878,846,814,782,1006,746,980,948,916,884,852, 820,788,1012,752,720,986,954,922,890,858,826,794,1018,758,726,992,960,928, 896,864,832,800,768,764,732,998,966,934,902,870,838,806,774,738,972,940,908, 876,844,812,780,1004,744,1023] [views:debug,2014-08-19T16:48:45.520,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/716. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.520,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",716,active,0} [ns_server:debug,2014-08-19T16:48:45.619,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 714. Nacking mccouch update. [views:debug,2014-08-19T16:48:45.619,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/714. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.619,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",714,active,0} [ns_server:debug,2014-08-19T16:48:45.620,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822, 790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962,930,898, 866,834,802,770,766,734,968,936,904,872,840,808,776,1000,740,974,942,910,878, 846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752,720,986,954, 922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800,768,764,732,998, 966,934,902,870,838,806,774,738,972,940,908,876,844,812,780,1004,744,1023, 978,946,914,882,850,818,786,1010] [views:debug,2014-08-19T16:48:45.670,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/714. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.670,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",714,active,0} [ns_server:debug,2014-08-19T16:48:45.836,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 712. Nacking mccouch update. [views:debug,2014-08-19T16:48:45.836,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/712. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.837,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",712,active,0} [ns_server:debug,2014-08-19T16:48:45.837,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822, 790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962,930,898, 866,834,802,770,766,734,968,936,904,872,840,808,776,1000,740,974,942,910,878, 846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752,720,986,954, 922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800,768,764,732,998, 966,934,902,870,838,806,774,738,972,940,908,876,844,812,780,1004,744,712, 1023,978,946,914,882,850,818,786,1010] [views:debug,2014-08-19T16:48:45.895,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/712. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:45.895,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",712,active,0} [ns_server:debug,2014-08-19T16:48:46.063,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 710. Nacking mccouch update. [views:debug,2014-08-19T16:48:46.063,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/710. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.064,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",710,active,0} [ns_server:debug,2014-08-19T16:48:46.064,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854, 822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962,930, 898,866,834,802,770,766,734,968,936,904,872,840,808,776,1000,740,974,942,910, 878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752,720,986, 954,922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800,768,764,732, 998,966,934,902,870,838,806,774,738,972,940,908,876,844,812,780,1004,744,712, 1023,978,946,914,882,850,818,786,1010] [views:debug,2014-08-19T16:48:46.122,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/710. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.122,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",710,active,0} [ns_server:debug,2014-08-19T16:48:46.239,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 708. Nacking mccouch update. [views:debug,2014-08-19T16:48:46.239,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/708. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.239,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",708,active,0} [ns_server:debug,2014-08-19T16:48:46.240,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854, 822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962,930, 898,866,834,802,770,766,734,968,936,904,872,840,808,776,1000,740,708,974,942, 910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752,720, 986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800,768,764, 732,998,966,934,902,870,838,806,774,738,972,940,908,876,844,812,780,1004,744, 712,1023,978,946,914,882,850,818,786,1010] [views:debug,2014-08-19T16:48:46.323,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/708. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.323,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",708,active,0} [ns_server:debug,2014-08-19T16:48:46.498,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 706. Nacking mccouch update. [views:debug,2014-08-19T16:48:46.498,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/706. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",706,active,0} [ns_server:debug,2014-08-19T16:48:46.499,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,718,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,970,938,906,874,842,810,778, 1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854, 822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962,930, 898,866,834,802,770,766,734,968,936,904,872,840,808,776,1000,740,708,974,942, 910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752,720, 986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800,768,764, 732,998,966,934,902,870,838,806,774,738,706,972,940,908,876,844,812,780,1004, 744,712,1023,978,946,914,882,850,818,786,1010] [views:debug,2014-08-19T16:48:46.571,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/706. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.571,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",706,active,0} [ns_server:debug,2014-08-19T16:48:46.646,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 704. Nacking mccouch update. [views:debug,2014-08-19T16:48:46.646,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/704. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.646,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",704,active,0} [ns_server:debug,2014-08-19T16:48:46.647,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,704,970,938,906,874,842,810, 778,1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886, 854,822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962, 930,898,866,834,802,770,766,734,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752, 720,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800,768, 764,732,998,966,934,902,870,838,806,774,738,706,972,940,908,876,844,812,780, 1004,744,712,1023,978,946,914,882,850,818,786,1010,718] [views:debug,2014-08-19T16:48:46.680,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/704. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.680,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",704,active,0} [ns_server:debug,2014-08-19T16:48:46.755,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 702. Nacking mccouch update. [views:debug,2014-08-19T16:48:46.755,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/702. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.755,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",702,active,0} [ns_server:debug,2014-08-19T16:48:46.756,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,704,970,938,906,874,842,810, 778,1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886, 854,822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962, 930,898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708, 974,942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012, 752,720,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800, 768,764,732,998,966,934,902,870,838,806,774,738,706,972,940,908,876,844,812, 780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718] [views:debug,2014-08-19T16:48:46.789,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/702. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.789,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",702,active,0} [ns_server:debug,2014-08-19T16:48:46.856,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 700. Nacking mccouch update. [views:debug,2014-08-19T16:48:46.856,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/700. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.856,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",700,active,0} [ns_server:debug,2014-08-19T16:48:46.857,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,996,964,932,900,868,836,804,772,736,704,970,938,906,874,842,810, 778,1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886, 854,822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994,962, 930,898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708, 974,942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012, 752,720,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864,832,800, 768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908,876,844, 812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718] [views:debug,2014-08-19T16:48:46.890,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/700. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.890,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",700,active,0} [ns_server:debug,2014-08-19T16:48:46.958,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 698. Nacking mccouch update. [views:debug,2014-08-19T16:48:46.958,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/698. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:46.958,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",698,active,0} [ns_server:debug,2014-08-19T16:48:46.959,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,698,996,964,932,900,868,836,804,772,736,704,970,938,906,874,842, 810,778,1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918, 886,854,822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,994, 962,930,898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740, 708,974,942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788, 1012,752,720,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864, 832,800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908, 876,844,812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718] [views:debug,2014-08-19T16:48:47.017,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/698. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.017,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",698,active,0} [ns_server:debug,2014-08-19T16:48:47.149,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 696. Nacking mccouch update. [views:debug,2014-08-19T16:48:47.149,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/696. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.149,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",696,active,0} [ns_server:debug,2014-08-19T16:48:47.149,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,952,920,888,856,824,792,1016,756,724,990,958,926,894,862,830,798, 1022,762,730,698,996,964,932,900,868,836,804,772,736,704,970,938,906,874,842, 810,778,1002,742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918, 886,854,822,790,1014,754,722,988,956,924,892,860,828,796,1020,760,728,696, 994,962,930,898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000, 740,708,974,942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788, 1012,752,720,986,954,922,890,858,826,794,1018,758,726,992,960,928,896,864, 832,800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908, 876,844,812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718] [views:debug,2014-08-19T16:48:47.233,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/696. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.233,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",696,active,0} [ns_server:debug,2014-08-19T16:48:47.399,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 694. Nacking mccouch update. [views:debug,2014-08-19T16:48:47.400,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/694. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.400,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",694,active,0} [ns_server:debug,2014-08-19T16:48:47.400,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,920,856,792,756,724,990,958,926,894,862,830,798,1022,762,730,698,996, 964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742,710, 976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822,790,1014, 754,722,988,956,924,892,860,828,796,1020,760,728,696,994,962,930,898,866,834, 802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974,942,910,878, 846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752,720,986,954, 922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832,800,768,764,732, 700,998,966,934,902,870,838,806,774,738,706,972,940,908,876,844,812,780,1004, 744,712,1023,978,946,914,882,850,818,786,1010,718,952,888,824,1016] [views:debug,2014-08-19T16:48:47.483,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/694. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.483,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",694,active,0} [ns_server:debug,2014-08-19T16:48:47.650,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 692. Nacking mccouch update. [views:debug,2014-08-19T16:48:47.650,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/692. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.651,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",692,active,0} [ns_server:debug,2014-08-19T16:48:47.651,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,920,856,792,756,724,692,990,958,926,894,862,830,798,1022,762,730,698, 996,964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742, 710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822,790, 1014,754,722,988,956,924,892,860,828,796,1020,760,728,696,994,962,930,898, 866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974,942, 910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752,720, 986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832,800,768, 764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908,876,844,812, 780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718,952,888,824,1016] [views:debug,2014-08-19T16:48:47.793,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/692. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.793,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",692,active,0} [ns_server:debug,2014-08-19T16:48:47.951,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 690. Nacking mccouch update. [views:debug,2014-08-19T16:48:47.951,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/690. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:47.951,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",690,active,0} [ns_server:debug,2014-08-19T16:48:47.952,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,920,856,792,756,724,692,990,958,926,894,862,830,798,1022,762,730,698, 996,964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742, 710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752, 720,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832,800, 768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908,876,844, 812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718,952,888,824, 1016] [views:debug,2014-08-19T16:48:48.018,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/690. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.019,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",690,active,0} [ns_server:debug,2014-08-19T16:48:48.183,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 688. Nacking mccouch update. [views:debug,2014-08-19T16:48:48.183,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/688. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.184,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",688,active,0} [ns_server:debug,2014-08-19T16:48:48.184,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,984,920,856,792,756,724,692,990,958,926,894,862,830,798,1022,762,730,698, 996,964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742, 710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752, 720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832, 800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908,876, 844,812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718,952,888, 824,1016] [views:debug,2014-08-19T16:48:48.218,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/688. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",688,active,0} [ns_server:debug,2014-08-19T16:48:48.284,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 686. Nacking mccouch update. [views:debug,2014-08-19T16:48:48.284,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/686. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.285,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",686,active,0} [ns_server:debug,2014-08-19T16:48:48.285,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,756,724,692,990,958,926,894,862,830,798,1022,762,730, 698,996,964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002, 742,710,976,944,912,880,848,816,784,1008,748,716,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752, 720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832, 800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908,876, 844,812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718,952,888, 824,1016] [views:debug,2014-08-19T16:48:48.318,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/686. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.319,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",686,active,0} [ns_server:debug,2014-08-19T16:48:48.385,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 684. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:48.385,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/684. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.385,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",684,active,0} [ns_server:debug,2014-08-19T16:48:48.386,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,990,958,926,894,862,830,798,1022,762,730,698,996, 964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742,710, 976,944,912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,980,948,916,884,852,820,788,1012,752, 720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832, 800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908,876, 844,812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718,952,888, 824,1016,756,692] [views:debug,2014-08-19T16:48:48.419,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/684. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.419,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",684,active,0} [ns_server:debug,2014-08-19T16:48:48.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 682. Nacking mccouch update. [views:debug,2014-08-19T16:48:48.486,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/682. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",682,active,0} [ns_server:debug,2014-08-19T16:48:48.486,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,990,958,926,894,862,830,798,1022,762,730,698,996, 964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742,710, 976,944,912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012, 752,720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864, 832,800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908, 876,844,812,780,1004,744,712,1023,978,946,914,882,850,818,786,1010,718,952, 888,824,1016,756,692] [views:debug,2014-08-19T16:48:48.520,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/682. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.520,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",682,active,0} [ns_server:debug,2014-08-19T16:48:48.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 680. Nacking mccouch update. [views:debug,2014-08-19T16:48:48.587,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/680. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",680,active,0} [ns_server:debug,2014-08-19T16:48:48.587,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,990,958,926,894,862,830,798,1022,762,730,698,996, 964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742,710, 976,944,912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012, 752,720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864, 832,800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908, 876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010,718, 952,888,824,1016,756,692] [views:debug,2014-08-19T16:48:48.621,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/680. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.621,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",680,active,0} [ns_server:debug,2014-08-19T16:48:48.752,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 678. Nacking mccouch update. [views:debug,2014-08-19T16:48:48.752,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/678. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",678,active,0} [ns_server:debug,2014-08-19T16:48:48.753,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,990,958,926,894,862,830,798,1022,762,730,698,996, 964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742,710, 678,976,944,912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,974, 942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012, 752,720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864, 832,800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972,940,908, 876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010,718, 952,888,824,1016,756,692] [views:debug,2014-08-19T16:48:48.811,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/678. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.811,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",678,active,0} [ns_server:debug,2014-08-19T16:48:48.953,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 676. Nacking mccouch update. [views:debug,2014-08-19T16:48:48.953,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/676. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.953,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",676,active,0} [ns_server:debug,2014-08-19T16:48:48.954,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,990,958,926,894,862,830,798,1022,762,730,698,996, 964,932,900,868,836,804,772,736,704,970,938,906,874,842,810,778,1002,742,710, 678,976,944,912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790, 1014,754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930, 898,866,834,802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,676, 974,942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788, 1012,752,720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928, 896,864,832,800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,972, 940,908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786, 1010,718,952,888,824,1016,756,692] [views:debug,2014-08-19T16:48:48.996,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/676. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:48.996,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",676,active,0} [ns_server:debug,2014-08-19T16:48:49.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 674. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:49.146,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",674,active,0} [ns_server:debug,2014-08-19T16:48:49.146,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,958,894,830,1022,762,730,698,996,964,932,900,868, 836,804,772,736,704,970,938,906,874,842,810,778,1002,742,710,678,976,944,912, 880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754,722, 690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930,898,866,834,802, 770,766,734,702,968,936,904,872,840,808,776,1000,740,708,676,974,942,910,878, 846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752,720,688, 986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832,800,768, 764,732,700,998,966,934,902,870,838,806,774,738,706,674,972,940,908,876,844, 812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010,718,952,888, 824,1016,756,692,990,926,862,798] [views:debug,2014-08-19T16:48:49.222,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/674. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.222,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",674,active,0} [ns_server:debug,2014-08-19T16:48:49.380,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 672. Nacking mccouch update. [views:debug,2014-08-19T16:48:49.380,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/672. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.380,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",672,active,0} [ns_server:debug,2014-08-19T16:48:49.380,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,958,894,830,1022,762,730,698,996,964,932,900,868, 836,804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944, 912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754, 722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930,898,866,834, 802,770,766,734,702,968,936,904,872,840,808,776,1000,740,708,676,974,942,910, 878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752,720, 688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832,800, 768,764,732,700,998,966,934,902,870,838,806,774,738,706,674,972,940,908,876, 844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010,718,952, 888,824,1016,756,692,990,926,862,798] [views:debug,2014-08-19T16:48:49.440,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/672. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.440,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",672,active,0} [ns_server:debug,2014-08-19T16:48:49.537,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 670. Nacking mccouch update. [views:debug,2014-08-19T16:48:49.537,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.537,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",670,active,0} [ns_server:debug,2014-08-19T16:48:49.538,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,958,894,830,1022,762,730,698,996,964,932,900,868, 836,804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944, 912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754, 722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930,898,866,834, 802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942, 910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752, 720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832, 800,768,764,732,700,998,966,934,902,870,838,806,774,738,706,674,972,940,908, 876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010,718, 952,888,824,1016,756,692,990,926,862,798] [views:debug,2014-08-19T16:48:49.596,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/670. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.596,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",670,active,0} [ns_server:debug,2014-08-19T16:48:49.763,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 668. Nacking mccouch update. [views:debug,2014-08-19T16:48:49.763,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/668. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.763,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",668,active,0} [ns_server:debug,2014-08-19T16:48:49.764,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,958,894,830,1022,762,730,698,996,964,932,900,868, 836,804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944, 912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754, 722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930,898,866,834, 802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942, 910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752, 720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832, 800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674,972,940, 908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010, 718,952,888,824,1016,756,692,990,926,862,798] [views:debug,2014-08-19T16:48:49.822,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/668. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.822,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",668,active,0} [ns_server:debug,2014-08-19T16:48:49.989,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 666. Nacking mccouch update. [views:debug,2014-08-19T16:48:49.989,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/666. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:49.989,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",666,active,0} [ns_server:debug,2014-08-19T16:48:49.990,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,958,894,830,1022,762,730,698,666,996,964,932,900, 868,836,804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976, 944,912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014, 754,722,690,988,956,924,892,860,828,796,1020,760,728,696,994,962,930,898,866, 834,802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974, 942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012, 752,720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864, 832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674,972, 940,908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786, 1010,718,952,888,824,1016,756,692,990,926,862,798] [views:debug,2014-08-19T16:48:50.048,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/666. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.048,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",666,active,0} [ns_server:debug,2014-08-19T16:48:50.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 664. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:50.215,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/664. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",664,active,0} [ns_server:debug,2014-08-19T16:48:50.215,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,958,894,830,1022,762,698,996,964,932,900,868,836, 804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944,912, 880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754,722, 690,988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834, 802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942, 910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752, 720,688,986,954,922,890,858,826,794,1018,758,726,694,992,960,928,896,864,832, 800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674,972,940, 908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010, 718,952,888,824,1016,756,692,990,926,862,798,730,666] [views:debug,2014-08-19T16:48:50.274,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/664. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.274,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",664,active,0} [ns_server:debug,2014-08-19T16:48:50.440,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 662. Nacking mccouch update. [views:debug,2014-08-19T16:48:50.440,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/662. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.441,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",662,active,0} [ns_server:debug,2014-08-19T16:48:50.441,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,958,894,830,1022,762,698,996,964,932,900,868,836, 804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944,912, 880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754,722, 690,988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834, 802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942, 910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752, 720,688,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896,864, 832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674,972, 940,908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786, 1010,718,952,888,824,1016,756,692,990,926,862,798,730,666] [views:debug,2014-08-19T16:48:50.524,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/662. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.524,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",662,active,0} [ns_server:debug,2014-08-19T16:48:50.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 660. Nacking mccouch update. [views:debug,2014-08-19T16:48:50.624,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/660. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",660,active,0} [ns_server:debug,2014-08-19T16:48:50.625,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,964,932,900,868, 836,804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944, 912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754, 722,690,988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866, 834,802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974, 942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012, 752,720,688,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896, 864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674, 972,940,908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818, 786,1010,718,952,888,824,1016,756,692,990,926,862,798,730,666] [views:debug,2014-08-19T16:48:50.674,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/660. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.675,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",660,active,0} [ns_server:debug,2014-08-19T16:48:50.758,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 658. Nacking mccouch update. [views:debug,2014-08-19T16:48:50.758,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/658. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.758,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",658,active,0} [ns_server:debug,2014-08-19T16:48:50.759,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,964,932,900,868, 836,804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944, 912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754, 722,690,658,988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898, 866,834,802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676, 974,942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788, 1012,752,720,688,986,954,922,890,858,826,794,1018,758,726,694,662,992,960, 928,896,864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738, 706,674,972,940,908,876,844,812,780,1004,744,712,680,1023,978,946,914,882, 850,818,786,1010,718,952,888,824,1016,756,692,990,926,862,798,730,666] [views:debug,2014-08-19T16:48:50.809,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/658. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.809,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",658,active,0} [ns_server:debug,2014-08-19T16:48:50.892,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 656. Nacking mccouch update. [views:debug,2014-08-19T16:48:50.892,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/656. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.892,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",656,active,0} [ns_server:debug,2014-08-19T16:48:50.893,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,964,932,900,868, 836,804,772,736,704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944, 912,880,848,816,784,1008,748,716,684,982,950,918,886,854,822,790,1014,754, 722,690,658,988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898, 866,834,802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676, 974,942,910,878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788, 1012,752,720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992, 960,928,896,864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774, 738,706,674,972,940,908,876,844,812,780,1004,744,712,680,1023,978,946,914, 882,850,818,786,1010,718,952,888,824,1016,756,692,990,926,862,798,730,666] [views:debug,2014-08-19T16:48:50.943,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/656. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:50.943,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",656,active,0} [ns_server:debug,2014-08-19T16:48:51.010,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 654. Nacking mccouch update. 
[views:debug,2014-08-19T16:48:51.010,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/654. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.010,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",654,active,0} [ns_server:debug,2014-08-19T16:48:51.010,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944,912,880,848,816, 784,1008,748,716,684,982,950,918,886,854,822,790,1014,754,722,690,658,988, 956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802,770, 766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942,910,878, 846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752,720,688, 656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896,864,832, 800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674,972,940, 908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786,1010, 718,654,952,888,824,1016,756,692,990,926,862,798,730,666,964,900,836,772] [views:debug,2014-08-19T16:48:51.043,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/654. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.044,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",654,active,0} [ns_server:debug,2014-08-19T16:48:51.175,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 652. Nacking mccouch update. [views:debug,2014-08-19T16:48:51.176,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/652. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.176,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",652,active,0} [ns_server:debug,2014-08-19T16:48:51.176,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942,910, 878,846,814,782,1006,746,714,682,980,948,916,884,852,820,788,1012,752,720, 688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896,864, 832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674,972, 940,908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818,786, 1010,718,654,952,888,824,1016,756,692,990,926,862,798,730,666,964,900,836, 772] [views:debug,2014-08-19T16:48:51.260,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/652. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.260,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",652,active,0} [ns_server:debug,2014-08-19T16:48:51.443,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 650. Nacking mccouch update. [views:debug,2014-08-19T16:48:51.443,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/650. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.443,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",650,active,0} [ns_server:debug,2014-08-19T16:48:51.444,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942,910, 878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752, 720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896, 864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674, 972,940,908,876,844,812,780,1004,744,712,680,1023,978,946,914,882,850,818, 786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730,666,964,900, 836,772] [views:debug,2014-08-19T16:48:51.527,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/650. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.527,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",650,active,0} [ns_server:debug,2014-08-19T16:48:51.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 648. Nacking mccouch update. [views:debug,2014-08-19T16:48:51.677,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/648. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",648,active,0} [ns_server:debug,2014-08-19T16:48:51.678,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 704,672,970,938,906,874,842,810,778,1002,742,710,678,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942,910, 878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752, 720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896, 864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706,674, 972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914,882,850, 818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730,666,964, 900,836,772] [views:debug,2014-08-19T16:48:51.728,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/648. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.728,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",648,active,0} [ns_server:debug,2014-08-19T16:48:51.869,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 646. Nacking mccouch update. [views:debug,2014-08-19T16:48:51.870,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/646. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.870,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",646,active,0} [ns_server:debug,2014-08-19T16:48:51.870,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 704,672,970,938,906,874,842,810,778,1002,742,710,678,646,976,944,912,880,848, 816,784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690, 658,988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834, 802,770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,974,942, 910,878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012, 752,720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928, 896,864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706, 674,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914,882, 850,818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730,666, 964,900,836,772] [views:debug,2014-08-19T16:48:51.921,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/646. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:51.921,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",646,active,0} [ns_server:debug,2014-08-19T16:48:52.070,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 644. 
Nacking mccouch update. [views:debug,2014-08-19T16:48:52.070,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/644. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.071,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",644,active,0} [ns_server:debug,2014-08-19T16:48:52.071,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 672,970,938,906,874,842,810,778,1002,742,710,678,646,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,644,974,942, 910,878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012, 752,720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928, 896,864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706, 674,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914,882, 850,818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730,666, 964,900,836,772,704] [views:debug,2014-08-19T16:48:52.119,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/644. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.119,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",644,active,0} [ns_server:debug,2014-08-19T16:48:52.186,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 642. Nacking mccouch update. [views:debug,2014-08-19T16:48:52.186,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/642. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.186,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",642,active,0} [ns_server:debug,2014-08-19T16:48:52.186,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 672,970,938,906,874,842,810,778,1002,742,710,678,646,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,644,974,942, 910,878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012, 752,720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928, 896,864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706, 674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914, 882,850,818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730, 666,964,900,836,772,704] [views:debug,2014-08-19T16:48:52.220,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/642. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.220,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",642,active,0} [ns_server:debug,2014-08-19T16:48:52.312,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 640. Nacking mccouch update. [views:debug,2014-08-19T16:48:52.312,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/640. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.312,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",640,active,0} [ns_server:debug,2014-08-19T16:48:52.312,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 672,970,938,906,874,842,810,778,1002,742,710,678,646,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,968,936,904,872,840,808,776,1000,740,708,676,644,974,942, 910,878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012, 752,720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928, 896,864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774,738,706, 674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914, 882,850,818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730, 666,964,900,836,772,704,640] [views:debug,2014-08-19T16:48:52.371,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/640. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.371,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",640,active,0} [ns_server:debug,2014-08-19T16:48:52.462,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 638. Nacking mccouch update. [views:debug,2014-08-19T16:48:52.462,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/638. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.463,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",638,active,0} [ns_server:debug,2014-08-19T16:48:52.463,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 672,970,938,906,874,842,810,778,1002,742,710,678,646,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974, 942,910,878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788, 1012,752,720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992, 960,928,896,864,832,800,768,764,732,700,668,998,966,934,902,870,838,806,774, 738,706,674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978, 946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,990,926,862, 798,730,666,964,900,836,772,704,640] [views:debug,2014-08-19T16:48:52.521,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/638. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.521,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",638,active,0} [ns_server:debug,2014-08-19T16:48:52.588,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 636. Nacking mccouch update. [views:debug,2014-08-19T16:48:52.588,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/636. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.588,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",636,active,0} [ns_server:debug,2014-08-19T16:48:52.589,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,996,932,868,804,736, 672,970,938,906,874,842,810,778,1002,742,710,678,646,976,944,912,880,848,816, 784,1008,748,716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658, 988,956,924,892,860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802, 770,766,734,702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974, 942,910,878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788, 1012,752,720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992, 960,928,896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806, 774,738,706,674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023, 978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,990,926, 862,798,730,666,964,900,836,772,704,640] [views:debug,2014-08-19T16:48:52.622,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/636. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.622,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",636,active,0} [ns_server:debug,2014-08-19T16:48:52.779,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 634. Nacking mccouch update. [views:debug,2014-08-19T16:48:52.779,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.779,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",634,active,0} [ns_server:debug,2014-08-19T16:48:52.780,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868,804, 736,672,970,906,842,778,742,710,678,646,976,944,912,880,848,816,784,1008,748, 716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658,988,956,924,892, 860,828,796,1020,760,728,696,664,994,962,930,898,866,834,802,770,766,734,702, 670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910,878,846, 814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752,720,688, 656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896,864,832, 800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706,674,642, 972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914,882,850, 818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730,666,964, 900,836,772,704,640,938,874,810,1002] [views:debug,2014-08-19T16:48:52.830,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/634. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.830,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",634,active,0} [ns_server:debug,2014-08-19T16:48:52.972,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 632. Nacking mccouch update. [views:debug,2014-08-19T16:48:52.972,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/632. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:52.972,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",632,active,0} [ns_server:debug,2014-08-19T16:48:52.972,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868,804, 736,672,970,906,842,778,742,710,678,646,976,944,912,880,848,816,784,1008,748, 716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658,988,956,924,892, 860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734, 702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910,878, 846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752,720, 688,656,986,954,922,890,858,826,794,1018,758,726,694,662,992,960,928,896,864, 832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706,674, 642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914,882, 850,818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730,666, 964,900,836,772,704,640,938,874,810,1002] [views:debug,2014-08-19T16:48:53.022,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/632. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.024,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",632,active,0} [ns_server:debug,2014-08-19T16:48:53.173,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 630. Nacking mccouch update. [views:debug,2014-08-19T16:48:53.173,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/630. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.173,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",630,active,0} [ns_server:debug,2014-08-19T16:48:53.173,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868,804, 736,672,970,906,842,778,742,710,678,646,976,944,912,880,848,816,784,1008,748, 716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658,988,956,924,892, 860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734, 702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910,878, 846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752,720, 688,656,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928,896, 864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706, 674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914, 882,850,818,786,1010,718,654,952,888,824,1016,756,692,990,926,862,798,730, 666,964,900,836,772,704,640,938,874,810,1002] [views:debug,2014-08-19T16:48:53.223,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/630. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.223,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",630,active,0} [ns_server:debug,2014-08-19T16:48:53.373,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 628. Nacking mccouch update. [views:debug,2014-08-19T16:48:53.373,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/628. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.373,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",628,active,0} [ns_server:debug,2014-08-19T16:48:53.374,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868,804, 736,672,970,906,842,778,742,710,678,646,976,944,912,880,848,816,784,1008,748, 716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658,988,956,924,892, 860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734, 702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910,878, 846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752,720, 688,656,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928,896, 864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706, 674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914, 882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926,862,798, 730,666,964,900,836,772,704,640,938,874,810,1002] [views:debug,2014-08-19T16:48:53.449,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/628. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.449,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",628,active,0} [ns_server:debug,2014-08-19T16:48:53.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 626. Nacking mccouch update. [views:debug,2014-08-19T16:48:53.589,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/626. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",626,active,0} [ns_server:debug,2014-08-19T16:48:53.589,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868,804, 736,672,970,906,842,778,742,710,678,646,976,944,912,880,848,816,784,1008,748, 716,684,652,982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924, 892,860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766, 734,702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910, 878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752, 720,688,656,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928, 896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738, 706,674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946, 914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926,862, 798,730,666,964,900,836,772,704,640,938,874,810,1002] [views:debug,2014-08-19T16:48:53.623,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/626. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.623,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",626,active,0} [ns_server:debug,2014-08-19T16:48:53.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 624. Nacking mccouch update. [views:debug,2014-08-19T16:48:53.691,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/624. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",624,active,0} [ns_server:debug,2014-08-19T16:48:53.691,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868,804, 736,672,970,906,842,778,710,646,976,944,912,880,848,816,784,1008,748,716,684, 652,982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860, 828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702, 670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910,878,846, 814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752,720,688, 656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928,896, 864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706, 674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946,914, 882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926,862,798, 730,666,964,900,836,772,704,640,938,874,810,1002,742,678] [views:debug,2014-08-19T16:48:53.725,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/624. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.725,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",624,active,0} [ns_server:debug,2014-08-19T16:48:53.791,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 622. Nacking mccouch update. [views:debug,2014-08-19T16:48:53.792,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.792,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",622,active,0} [ns_server:debug,2014-08-19T16:48:53.792,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,970,906,842,778,710,646,976,944,912,880,848,816,784,1008,748,716, 684,652,982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892, 860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734, 702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910,878, 846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752,720, 688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928, 896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738, 706,674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978,946, 914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926,862, 798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678] [views:debug,2014-08-19T16:48:53.825,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/622. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.826,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",622,active,0} [ns_server:debug,2014-08-19T16:48:53.909,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 620. Nacking mccouch update. [views:debug,2014-08-19T16:48:53.909,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/620. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.909,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",620,active,0} [ns_server:debug,2014-08-19T16:48:53.910,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,970,906,842,778,710,646,976,944,912,880,848,816,784,1008,748,716, 684,652,620,982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924, 892,860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766, 734,702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910, 878,846,814,782,1006,746,714,682,650,980,948,916,884,852,820,788,1012,752, 720,688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960, 928,896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774, 738,706,674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023,978, 946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926, 862,798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678] [views:debug,2014-08-19T16:48:53.960,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/620. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:53.960,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",620,active,0} [ns_server:debug,2014-08-19T16:48:54.043,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 618. Nacking mccouch update. [views:debug,2014-08-19T16:48:54.043,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/618. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.044,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",618,active,0} [ns_server:debug,2014-08-19T16:48:54.044,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,970,906,842,778,710,646,976,944,912,880,848,816,784,1008,748,716, 684,652,620,982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924, 892,860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766, 734,702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910, 878,846,814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012, 752,720,688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992, 960,928,896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806, 774,738,706,674,642,972,940,908,876,844,812,780,1004,744,712,680,648,1023, 978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990, 926,862,798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678] [views:debug,2014-08-19T16:48:54.116,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/618. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.116,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",618,active,0} [ns_server:debug,2014-08-19T16:48:54.283,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 616. Nacking mccouch update. [views:debug,2014-08-19T16:48:54.283,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/616. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.283,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",616,active,0} [ns_server:debug,2014-08-19T16:48:54.284,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,970,906,842,778,710,646,976,944,912,880,848,816,784,1008,748,716, 684,652,620,982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924, 892,860,828,796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766, 734,702,670,638,968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910, 878,846,814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012, 752,720,688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992, 960,928,896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806, 774,738,706,674,642,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628, 990,926,862,798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678] [views:debug,2014-08-19T16:48:54.342,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/616. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",616,active,0} [ns_server:debug,2014-08-19T16:48:54.510,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 614. Nacking mccouch update. [views:debug,2014-08-19T16:48:54.510,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/614. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.510,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",614,active,0} [ns_server:debug,2014-08-19T16:48:54.511,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,970,906,842,778,710,646,944,880,816,1008,748,716,684,652,620,982, 950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702,670,638, 968,936,904,872,840,808,776,1000,740,708,676,644,974,942,910,878,846,814,782, 1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720,688,656, 624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928,896,864, 832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706,674, 642,972,940,908,876,844,812,780,1004,744,712,680,648,616,1023,978,946,914, 882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926,862,798, 730,666,964,900,836,772,704,640,938,874,810,1002,742,678,614,976,912,848,784] [views:debug,2014-08-19T16:48:54.585,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/614. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.586,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",614,active,0} [ns_server:debug,2014-08-19T16:48:54.761,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 612. Nacking mccouch update. [views:debug,2014-08-19T16:48:54.761,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/612. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.761,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",612,active,0} [ns_server:debug,2014-08-19T16:48:54.762,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,970,906,842,778,710,646,944,880,816,1008,748,716,684,652,620,982, 950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702,670,638, 968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814, 782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720,688, 656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928,896, 864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706, 674,642,972,940,908,876,844,812,780,1004,744,712,680,648,616,1023,978,946, 914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926,862, 798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678,614,976,912,848, 784] [views:debug,2014-08-19T16:48:54.845,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/612. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:54.845,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",612,active,0} [ns_server:debug,2014-08-19T16:48:55.012,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 610. Nacking mccouch update. [views:debug,2014-08-19T16:48:55.012,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/610. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.012,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",610,active,0} [ns_server:debug,2014-08-19T16:48:55.012,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,970,906,842,778,710,646,944,880,816,1008,748,716,684,652,620,982, 950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702,670,638, 968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814, 782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720,688, 656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928,896, 864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738,706, 674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616,1023,978, 946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990,926, 862,798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678,614,976,912, 848,784] [views:debug,2014-08-19T16:48:55.096,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/610. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.096,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",610,active,0} [ns_server:debug,2014-08-19T16:48:55.194,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 608. Nacking mccouch update. [views:debug,2014-08-19T16:48:55.194,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/608. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.194,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",608,active,0} [ns_server:debug,2014-08-19T16:48:55.194,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,716,684,652,620, 982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828, 796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702,670, 638,968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846, 814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720, 688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928, 896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774,738, 706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616,1023, 978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990, 926,862,798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678,614,976, 912,848,784] [views:debug,2014-08-19T16:48:55.228,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/608. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.228,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",608,active,0} [ns_server:debug,2014-08-19T16:48:55.294,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 606. Nacking mccouch update. [views:debug,2014-08-19T16:48:55.294,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/606. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.295,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",606,active,0} [ns_server:debug,2014-08-19T16:48:55.295,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,716,684,652,620, 982,950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828, 796,1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702,670, 638,606,968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878, 846,814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752, 720,688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960, 928,896,864,832,800,768,764,732,700,668,636,998,966,934,902,870,838,806,774, 738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628, 990,926,862,798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678,614, 976,912,848,784] [views:debug,2014-08-19T16:48:55.329,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/606. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.329,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",606,active,0} [ns_server:debug,2014-08-19T16:48:55.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 604. Nacking mccouch update. [views:debug,2014-08-19T16:48:55.420,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/604. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",604,active,0} [ns_server:debug,2014-08-19T16:48:55.421,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982,950, 918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702,670,638, 606,968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846, 814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720, 688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928, 896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774, 738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628, 990,926,862,798,730,666,964,900,836,772,704,640,938,874,810,1002,742,678,614, 976,912,848,784,716,652] [views:debug,2014-08-19T16:48:55.480,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/604. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.480,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",604,active,0} [ns_server:debug,2014-08-19T16:48:55.571,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 602. Nacking mccouch update. [views:debug,2014-08-19T16:48:55.571,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/602. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.571,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",602,active,0} [ns_server:debug,2014-08-19T16:48:55.572,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982,950, 918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,994,962,930,898,866,834,802,770,766,734,702,670,638, 606,968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846, 814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720, 688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960,928, 896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774, 738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628, 990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742,678, 614,976,912,848,784,716,652] [views:debug,2014-08-19T16:48:55.630,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/602. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",602,active,0} [ns_server:debug,2014-08-19T16:48:55.729,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 600. Nacking mccouch update. [views:debug,2014-08-19T16:48:55.729,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/600. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.729,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",600,active,0} [ns_server:debug,2014-08-19T16:48:55.730,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982,950, 918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670, 638,606,968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878, 846,814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752, 720,688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,992,960, 928,896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806, 774,738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628, 990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742,678, 614,976,912,848,784,716,652] [views:debug,2014-08-19T16:48:55.796,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/600. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.796,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",600,active,0} [ns_server:debug,2014-08-19T16:48:55.971,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 598. Nacking mccouch update. [views:debug,2014-08-19T16:48:55.971,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/598. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:55.972,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",598,active,0} [ns_server:debug,2014-08-19T16:48:55.972,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,958,894,830,1022,762,698,634,996,932,868, 804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982,950, 918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670, 638,606,968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878, 846,814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752, 720,688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992, 960,928,896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838, 806,774,738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648, 616,1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742, 678,614,976,912,848,784,716,652] [views:debug,2014-08-19T16:48:56.030,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/598. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.031,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",598,active,0} [ns_server:debug,2014-08-19T16:48:56.205,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 596. Nacking mccouch update. [views:debug,2014-08-19T16:48:56.205,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/596. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",596,active,0} [ns_server:debug,2014-08-19T16:48:56.206,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982, 950,918,886,854,822,790,1014,754,722,690,658,626,988,956,924,892,860,828,796, 1020,760,728,696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670, 638,606,968,936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878, 846,814,782,1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752, 720,688,656,624,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992, 960,928,896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838, 806,774,738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648, 616,1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742, 678,614,976,912,848,784,716,652] [views:debug,2014-08-19T16:48:56.290,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/596. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.290,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",596,active,0} [ns_server:debug,2014-08-19T16:48:56.441,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:48:56.444,ns_1@10.242.238.88:<0.22503.0>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:48:56.444,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:48:56.444,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:48:56.465,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 594. Nacking mccouch update. [views:debug,2014-08-19T16:48:56.465,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/594. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.465,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",594,active,0} [ns_server:debug,2014-08-19T16:48:56.465,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982, 918,854,790,754,722,690,658,626,594,988,956,924,892,860,828,796,1020,760,728, 696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968, 936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814,782, 1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720,688,656, 624,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928,896, 864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774,738, 706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616,1023, 978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628,990, 926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742,678,614, 976,912,848,784,716,652,950,886,822,1014] [views:debug,2014-08-19T16:48:56.498,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/594. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.498,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",594,active,0} [ns_server:debug,2014-08-19T16:48:56.565,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 592. Nacking mccouch update. [views:debug,2014-08-19T16:48:56.565,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/592. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",592,active,0} [ns_server:debug,2014-08-19T16:48:56.566,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982, 918,854,790,754,722,690,658,626,594,988,956,924,892,860,828,796,1020,760,728, 696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968, 936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814,782, 1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720,688,656, 624,592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928, 896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774, 738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,952,888,824,1016,756,692,628, 990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742,678, 614,976,912,848,784,716,652,950,886,822,1014] [views:debug,2014-08-19T16:48:56.599,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/592. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.599,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",592,active,0} [ns_server:debug,2014-08-19T16:48:56.747,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 590. Nacking mccouch update. [views:debug,2014-08-19T16:48:56.747,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/590. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.747,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",590,active,0} [ns_server:debug,2014-08-19T16:48:56.748,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982, 918,854,790,754,722,690,658,626,594,988,956,924,892,860,828,796,1020,760,728, 696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968, 936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814,782, 1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720,688,656, 624,592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928, 896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774, 738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742, 678,614,976,912,848,784,716,652,950,886,822,1014] [views:debug,2014-08-19T16:48:56.823,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/590. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.823,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",590,active,0} [ns_server:debug,2014-08-19T16:48:56.973,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 588. Nacking mccouch update. [views:debug,2014-08-19T16:48:56.973,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/588. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:56.973,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",588,active,0} [ns_server:debug,2014-08-19T16:48:56.974,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982, 918,854,790,754,722,690,658,626,594,988,956,924,892,860,828,796,1020,760,728, 696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968, 936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814,782, 1006,746,714,682,650,618,980,948,916,884,852,820,788,1012,752,720,688,656, 624,592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928, 896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774, 738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742, 678,614,976,912,848,784,716,652,588,950,886,822,1014] [views:debug,2014-08-19T16:48:57.048,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/588. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.049,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",588,active,0} [ns_server:debug,2014-08-19T16:48:57.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 586. Nacking mccouch update. [views:debug,2014-08-19T16:48:57.215,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/586. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.216,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",586,active,0} [ns_server:debug,2014-08-19T16:48:57.216,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982, 918,854,790,754,722,690,658,626,594,988,956,924,892,860,828,796,1020,760,728, 696,664,632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968, 936,904,872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814,782, 1006,746,714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688, 656,624,592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960, 928,896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806, 774,738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742, 678,614,976,912,848,784,716,652,588,950,886,822,1014] [views:debug,2014-08-19T16:48:57.301,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/586. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.301,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",586,active,0} [ns_server:debug,2014-08-19T16:48:57.467,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 584. Nacking mccouch update. [views:debug,2014-08-19T16:48:57.467,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/584. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.467,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",584,active,0} [ns_server:debug,2014-08-19T16:48:57.468,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,944,880,816,1008,748,684,620,982, 918,854,790,722,658,594,988,956,924,892,860,828,796,1020,760,728,696,664,632, 600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968,936,904,872, 840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814,782,1006,746, 714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592, 986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928,896,864, 832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774,738,706, 674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,1023, 978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692,628, 990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742,678, 614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626] [views:debug,2014-08-19T16:48:57.543,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/584. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.543,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",584,active,0} [ns_server:debug,2014-08-19T16:48:57.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 582. Nacking mccouch update. [views:debug,2014-08-19T16:48:57.691,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/582. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",582,active,0} [ns_server:debug,2014-08-19T16:48:57.691,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684,620, 982,918,854,790,722,658,594,988,956,924,892,860,828,796,1020,760,728,696,664, 632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968,936,904, 872,840,808,776,1000,740,708,676,644,612,974,942,910,878,846,814,782,1006, 746,714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624, 592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928,896, 864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774,738, 706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002,742, 678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626] [views:debug,2014-08-19T16:48:57.725,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/582. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.725,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",582,active,0} [ns_server:debug,2014-08-19T16:48:57.817,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 580. Nacking mccouch update. [views:debug,2014-08-19T16:48:57.817,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/580. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.817,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",580,active,0} [ns_server:debug,2014-08-19T16:48:57.817,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684,620, 982,918,854,790,722,658,594,988,956,924,892,860,828,796,1020,760,728,696,664, 632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968,936,904, 872,840,808,776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782, 1006,746,714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688, 656,624,592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960, 928,896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806, 774,738,706,674,642,610,972,940,908,876,844,812,780,1004,744,712,680,648,616, 584,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756, 692,628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810,1002, 742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626] [views:debug,2014-08-19T16:48:57.850,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/580. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.851,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",580,active,0} [ns_server:debug,2014-08-19T16:48:57.917,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 578. Nacking mccouch update. [views:debug,2014-08-19T16:48:57.917,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/578. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.918,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",578,active,0} [ns_server:debug,2014-08-19T16:48:57.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684,620, 982,918,854,790,722,658,594,988,956,924,892,860,828,796,1020,760,728,696,664, 632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968,936,904, 872,840,808,776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782, 1006,746,714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688, 656,624,592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960, 928,896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806, 774,738,706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648, 616,584,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016, 756,692,628,990,926,862,798,730,666,602,964,900,836,772,704,640,938,874,810, 1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626] [views:debug,2014-08-19T16:48:57.951,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/578. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:57.951,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",578,active,0} [ns_server:debug,2014-08-19T16:48:58.019,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 576. Nacking mccouch update. [views:debug,2014-08-19T16:48:58.020,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/576. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.020,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",576,active,0} [ns_server:debug,2014-08-19T16:48:58.020,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684,620, 982,918,854,790,722,658,594,988,956,924,892,860,828,796,1020,760,728,696,664, 632,600,994,962,930,898,866,834,802,770,766,734,702,670,638,606,968,936,904, 872,840,808,776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782, 1006,746,714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688, 656,624,592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960, 928,896,864,832,800,768,764,732,700,668,636,604,998,966,934,902,870,838,806, 774,738,706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648, 616,584,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016, 756,692,628,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626] [views:debug,2014-08-19T16:48:58.053,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/576. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.053,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",576,active,0} [ns_server:debug,2014-08-19T16:48:58.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 574. Nacking mccouch update. [views:debug,2014-08-19T16:48:58.120,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/574. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",574,active,0} [ns_server:debug,2014-08-19T16:48:58.121,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684,620, 982,918,854,790,722,658,594,956,892,828,1020,760,728,696,664,632,600,994,962, 930,898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840,808, 776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714, 682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,986, 954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928,896,864,832, 800,768,764,732,700,668,636,604,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,1023, 978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692,628, 990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810,1002,742, 678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626,988,924,860, 796] [views:debug,2014-08-19T16:48:58.168,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/574. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.169,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",574,active,0} [ns_server:debug,2014-08-19T16:48:58.269,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 572. Nacking mccouch update. [views:debug,2014-08-19T16:48:58.269,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/572. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.269,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",572,active,0} [ns_server:debug,2014-08-19T16:48:58.269,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,996,932, 868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684,620, 982,918,854,790,722,658,594,956,892,828,1020,760,728,696,664,632,600,994,962, 930,898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840,808, 776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714, 682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,986, 954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928,896,864,832, 800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706, 674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810,1002, 742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626,988,924, 860,796] [views:debug,2014-08-19T16:48:58.353,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/572. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.353,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",572,active,0} [ns_server:debug,2014-08-19T16:48:58.519,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 570. Nacking mccouch update. [views:debug,2014-08-19T16:48:58.519,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/570. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.520,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",570,active,0} [ns_server:debug,2014-08-19T16:48:58.520,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570,996, 932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684, 620,982,918,854,790,722,658,594,956,892,828,1020,760,728,696,664,632,600,994, 962,930,898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840, 808,776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746, 714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592, 986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928,896,864, 832,800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738, 706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810,1002, 742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626,988,924, 860,796] [views:debug,2014-08-19T16:48:58.603,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/570. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.604,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",570,active,0} [ns_server:debug,2014-08-19T16:48:58.778,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 568. Nacking mccouch update. [views:debug,2014-08-19T16:48:58.779,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/568. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.779,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",568,active,0} [ns_server:debug,2014-08-19T16:48:58.779,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570,996, 932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684, 620,982,918,854,790,722,658,594,956,892,828,1020,760,728,696,664,632,600,568, 994,962,930,898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872, 840,808,776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006, 746,714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624, 592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,992,960,928,896, 864,832,800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774, 738,706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616, 584,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756, 692,628,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626, 988,924,860,796] [views:debug,2014-08-19T16:48:58.862,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/568. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:58.863,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",568,active,0} [ns_server:debug,2014-08-19T16:48:59.038,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 566. Nacking mccouch update. [views:debug,2014-08-19T16:48:59.038,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/566. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.038,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",566,active,0} [ns_server:debug,2014-08-19T16:48:59.038,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570,996, 932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684, 620,982,918,854,790,722,658,594,956,892,828,1020,760,728,696,664,632,600,568, 994,962,930,898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872, 840,808,776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006, 746,714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624, 592,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928, 896,864,832,800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806, 774,738,706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648, 616,584,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016, 756,692,628,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626,988,924,860,796] [views:debug,2014-08-19T16:48:59.122,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/566. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.122,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",566,active,0} [ns_server:debug,2014-08-19T16:48:59.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 564. Nacking mccouch update. [views:debug,2014-08-19T16:48:59.288,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/564. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.288,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",564,active,0} [ns_server:debug,2014-08-19T16:48:59.288,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570,996, 932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684, 620,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994,962,930, 898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840,808,776, 1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714,682, 650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,986,954, 922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832, 800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706, 674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626, 988,924,860,796,728,664,600] [views:debug,2014-08-19T16:48:59.321,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/564. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.321,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",564,active,0} [ns_server:debug,2014-08-19T16:48:59.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 562. Nacking mccouch update. [views:debug,2014-08-19T16:48:59.388,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/562. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",562,active,0} [ns_server:debug,2014-08-19T16:48:59.389,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570,996, 932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684, 620,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994,962,930, 898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840,808,776, 1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714,682, 650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,986,954, 922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832, 800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706, 674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626, 562,988,924,860,796,728,664,600] [views:debug,2014-08-19T16:48:59.422,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/562. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.422,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",562,active,0} [ns_server:debug,2014-08-19T16:48:59.489,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 560. Nacking mccouch update. [views:debug,2014-08-19T16:48:59.489,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/560. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.489,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",560,active,0} [ns_server:debug,2014-08-19T16:48:59.490,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570,996, 932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748,684, 620,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994,962,930, 898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840,808,776, 1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714,682, 650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986, 954,922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864, 832,800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738, 706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626, 562,988,924,860,796,728,664,600] [views:debug,2014-08-19T16:48:59.523,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/560. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.523,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",560,active,0} [ns_server:debug,2014-08-19T16:48:59.590,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 558. Nacking mccouch update. [views:debug,2014-08-19T16:48:59.590,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/558. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.590,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",558,active,0} [ns_server:debug,2014-08-19T16:48:59.591,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748, 684,620,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994,962, 930,898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840,808, 776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714, 682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560, 986,954,922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896, 864,832,800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774, 738,706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616, 584,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626,562,988,924,860,796,728,664,600] [views:debug,2014-08-19T16:48:59.624,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/558. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.625,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",558,active,0} [ns_server:debug,2014-08-19T16:48:59.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 556. Nacking mccouch update. [views:debug,2014-08-19T16:48:59.691,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/556. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",556,active,0} [ns_server:debug,2014-08-19T16:48:59.691,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748, 684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994, 962,930,898,866,834,802,770,766,734,702,670,638,606,574,968,936,904,872,840, 808,776,1000,740,708,676,644,612,580,974,942,910,878,846,814,782,1006,746, 714,682,650,618,586,980,948,916,884,852,820,788,1012,752,720,688,656,624,592, 560,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928, 896,864,832,800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806, 774,738,706,674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648, 616,584,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016, 756,692,628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938, 874,810,1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754, 690,626,562,988,924,860,796,728,664,600] [views:debug,2014-08-19T16:48:59.764,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/556. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.764,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",556,active,0} [ns_server:debug,2014-08-19T16:48:59.947,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 554. Nacking mccouch update. [views:debug,2014-08-19T16:48:59.947,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/554. Updated state: active (0) [ns_server:debug,2014-08-19T16:48:59.948,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",554,active,0} [ns_server:debug,2014-08-19T16:48:59.948,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748, 684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994, 930,866,802,766,734,702,670,638,606,574,968,936,904,872,840,808,776,1000,740, 708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714,682,650,618,586, 554,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922, 890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800, 768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,1023, 978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692,628, 564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810,1002, 742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626,562,988, 924,860,796,728,664,600,962,898,834,770] [views:debug,2014-08-19T16:49:00.023,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/554. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.023,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",554,active,0} [ns_server:debug,2014-08-19T16:49:00.173,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 552. Nacking mccouch update. [views:debug,2014-08-19T16:49:00.173,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/552. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.173,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",552,active,0} [ns_server:debug,2014-08-19T16:49:00.174,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748, 684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994, 930,866,802,766,734,702,670,638,606,574,968,936,904,872,840,808,776,1000,740, 708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714,682,650,618,586, 554,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922, 890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800, 768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,976,912,848,784,716,652,588,950,886,822,1014,754,690,626, 562,988,924,860,796,728,664,600,962,898,834,770] [views:debug,2014-08-19T16:49:00.249,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/552. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.249,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",552,active,0} [ns_server:debug,2014-08-19T16:49:00.390,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 550. Nacking mccouch update. [views:debug,2014-08-19T16:49:00.390,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/550. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.391,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",550,active,0} [ns_server:debug,2014-08-19T16:49:00.391,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748, 684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994, 930,866,802,766,734,702,670,638,606,574,968,936,904,872,840,808,776,1000,740, 708,676,644,612,580,974,942,910,878,846,814,782,1006,746,714,682,650,618,586, 554,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922, 890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800, 768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626,562,988,924,860,796,728,664,600,962,898,834,770] [views:debug,2014-08-19T16:49:00.458,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/550. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.459,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",550,active,0} [ns_server:debug,2014-08-19T16:49:00.608,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 548. Nacking mccouch update. [views:debug,2014-08-19T16:49:00.608,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/548. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.608,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",548,active,0} [ns_server:debug,2014-08-19T16:49:00.609,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748, 684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994, 930,866,802,766,734,702,670,638,606,574,968,936,904,872,840,808,776,1000,740, 708,676,644,612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618, 586,554,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954, 922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832, 800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706, 674,642,610,578,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626,562,988,924,860,796,728,664,600,962,898,834,770] [views:debug,2014-08-19T16:49:00.675,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/548. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.676,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",548,active,0} [ns_server:debug,2014-08-19T16:49:00.766,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 546. Nacking mccouch update. [views:debug,2014-08-19T16:49:00.766,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/546. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.766,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",546,active,0} [ns_server:debug,2014-08-19T16:49:00.767,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,970,906,842,778,710,646,582,944,880,816,1008,748, 684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568,994, 930,866,802,766,734,702,670,638,606,574,968,936,904,872,840,808,776,1000,740, 708,676,644,612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618, 586,554,980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954, 922,890,858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832, 800,768,764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706, 674,642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 552,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754, 690,626,562,988,924,860,796,728,664,600,962,898,834,770] [views:debug,2014-08-19T16:49:00.801,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/546. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.801,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",546,active,0} [ns_server:debug,2014-08-19T16:49:00.867,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 544. Nacking mccouch update. [views:debug,2014-08-19T16:49:00.867,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/544. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.867,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",544,active,0} [ns_server:debug,2014-08-19T16:49:00.868,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816,1008, 748,684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568, 994,930,866,802,734,670,606,968,936,904,872,840,808,776,1000,740,708,676,644, 612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,980, 948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890,858, 826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800,768,764, 732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706,674,642,610, 578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,1023, 978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692,628, 564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810,1002, 742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754,690,626,562, 988,924,860,796,728,664,600,962,898,834,770,766,702,638,574] [views:debug,2014-08-19T16:49:00.901,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/544. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.901,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",544,active,0} [ns_server:debug,2014-08-19T16:49:00.968,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 542. Nacking mccouch update. [views:debug,2014-08-19T16:49:00.968,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/542. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:00.968,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",542,active,0} [ns_server:debug,2014-08-19T16:49:00.969,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816,1008, 748,684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568, 994,930,866,802,734,670,606,542,968,936,904,872,840,808,776,1000,740,708,676, 644,612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554, 980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890, 858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800,768, 764,732,700,668,636,604,572,998,966,934,902,870,838,806,774,738,706,674,642, 610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626,562,988,924,860,796,728,664,600,962,898,834,770,766,702,638,574] [views:debug,2014-08-19T16:49:01.002,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/542. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.003,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",542,active,0} [ns_server:debug,2014-08-19T16:49:01.068,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 540. Nacking mccouch update. [views:debug,2014-08-19T16:49:01.069,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/540. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.069,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",540,active,0} [ns_server:debug,2014-08-19T16:49:01.069,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816,1008, 748,684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568, 994,930,866,802,734,670,606,542,968,936,904,872,840,808,776,1000,740,708,676, 644,612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554, 980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890, 858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800,768, 764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626,562,988,924,860,796,728,664,600,962,898,834,770,766,702,638,574] [views:debug,2014-08-19T16:49:01.103,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/540. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.103,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",540,active,0} [ns_server:debug,2014-08-19T16:49:01.185,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 538. Nacking mccouch update. [views:debug,2014-08-19T16:49:01.185,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/538. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.185,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",538,active,0} [ns_server:debug,2014-08-19T16:49:01.185,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816,1008, 748,684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568, 994,930,866,802,734,670,606,542,968,936,904,872,840,808,776,1000,740,708,676, 644,612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554, 980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890, 858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800,768, 764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754, 690,626,562,988,924,860,796,728,664,600,962,898,834,770,766,702,638,574] [views:debug,2014-08-19T16:49:01.252,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/538. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.252,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",538,active,0} [ns_server:debug,2014-08-19T16:49:01.419,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 536. Nacking mccouch update. [views:debug,2014-08-19T16:49:01.419,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/536. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.419,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",536,active,0} [ns_server:debug,2014-08-19T16:49:01.420,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816,1008, 748,684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568, 994,930,866,802,734,670,606,542,968,936,904,872,840,808,776,1000,740,708,676, 644,612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554, 980,948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890, 858,826,794,1018,758,726,694,662,630,598,566,992,960,928,896,864,832,800,768, 764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754, 690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702,638,574] [views:debug,2014-08-19T16:49:01.494,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/536. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.494,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",536,active,0} [ns_server:debug,2014-08-19T16:49:01.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 534. Nacking mccouch update. [views:debug,2014-08-19T16:49:01.661,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/534. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.662,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",534,active,0} [ns_server:debug,2014-08-19T16:49:01.662,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,958,894,830,1022,762,698,634,570, 996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816,1008, 748,684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696,632,568, 994,930,866,802,734,670,606,542,968,904,840,776,740,708,676,644,612,580,548, 974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,980,948,916,884, 852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890,858,826,794, 1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732, 700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610, 578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,1023, 978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692,628, 564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938,874,810, 1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754,690, 626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702,638,574,936, 872,808,1000] [views:debug,2014-08-19T16:49:01.745,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/534. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.746,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",534,active,0} [ns_server:debug,2014-08-19T16:49:01.920,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 532. Nacking mccouch update. [views:debug,2014-08-19T16:49:01.920,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/532. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:01.921,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",532,active,0} [ns_server:debug,2014-08-19T16:49:01.921,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816, 1008,748,684,620,556,982,918,854,790,722,658,594,956,892,828,1020,760,696, 632,568,994,930,866,802,734,670,606,542,968,904,840,776,740,708,676,644,612, 580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,980,948, 916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890,858,826, 794,1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764, 732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642, 610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754, 690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702,638,574, 936,872,808,1000] [views:debug,2014-08-19T16:49:02.004,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/532. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.005,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",532,active,0} [ns_server:debug,2014-08-19T16:49:02.171,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 530. Nacking mccouch update. [views:debug,2014-08-19T16:49:02.171,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/530. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.171,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",530,active,0} [ns_server:debug,2014-08-19T16:49:02.172,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816, 1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020,760, 696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,740,708,676,644, 612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,980, 948,916,884,852,820,788,1012,752,720,688,656,624,592,560,986,954,922,890,858, 826,794,1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768, 764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014,754, 690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702,638,574, 936,872,808,1000] [views:debug,2014-08-19T16:49:02.230,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/530. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.231,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",530,active,0} [ns_server:debug,2014-08-19T16:49:02.321,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 528. Nacking mccouch update. [views:debug,2014-08-19T16:49:02.321,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/528. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.321,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",528,active,0} [ns_server:debug,2014-08-19T16:49:02.322,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816, 1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020,760, 696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,740,708,676,644, 612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,980, 948,916,884,852,820,788,1012,752,720,688,656,624,592,560,528,986,954,922,890, 858,826,794,1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800, 768,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706, 674,642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 552,1023,978,946,914,882,850,818,786,1010,718,654,590,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938, 874,810,1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822,1014, 754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702,638, 574,936,872,808,1000] [views:debug,2014-08-19T16:49:02.355,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/528. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.357,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",528,active,0} [ns_server:debug,2014-08-19T16:49:02.422,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 526. Nacking mccouch update. [views:debug,2014-08-19T16:49:02.422,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/526. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.422,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",526,active,0} [ns_server:debug,2014-08-19T16:49:02.423,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816, 1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020,760, 696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,740,708,676,644, 612,580,548,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,980, 948,916,884,852,820,788,1012,752,720,688,656,624,592,560,528,986,954,922,890, 858,826,794,1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800, 768,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706, 674,642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584, 552,1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016, 756,692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576, 938,874,810,1002,742,678,614,550,976,912,848,784,716,652,588,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,1000] [views:debug,2014-08-19T16:49:02.457,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/526. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.457,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",526,active,0} [ns_server:debug,2014-08-19T16:49:02.531,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 524. Nacking mccouch update. [views:debug,2014-08-19T16:49:02.531,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/524. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.531,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",524,active,0} [ns_server:debug,2014-08-19T16:49:02.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816, 1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020,760, 696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580,974, 942,910,878,846,814,782,1006,746,714,682,650,618,586,554,980,948,916,884,852, 820,788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794, 1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732, 700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610, 578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,1023, 978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822,1014, 754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702,638, 574,936,872,808,1000,740,676,612,548] [views:debug,2014-08-19T16:49:02.566,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/524. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",524,active,0} [ns_server:debug,2014-08-19T16:49:02.632,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 522. Nacking mccouch update. [views:debug,2014-08-19T16:49:02.632,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/522. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.632,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",522,active,0} [ns_server:debug,2014-08-19T16:49:02.633,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816, 1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020,760, 696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580,974, 942,910,878,846,814,782,1006,746,714,682,650,618,586,554,522,980,948,916,884, 852,820,788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794, 1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732, 700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610, 578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,1023, 978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938,874, 810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822,1014, 754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702,638, 574,936,872,808,1000,740,676,612,548] [views:debug,2014-08-19T16:49:02.666,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/522. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.667,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",522,active,0} [ns_server:debug,2014-08-19T16:49:02.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 520. Nacking mccouch update. [views:debug,2014-08-19T16:49:02.813,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/520. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",520,active,0} [ns_server:debug,2014-08-19T16:49:02.814,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,944,880,816, 1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020,760, 696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580,974, 942,910,878,846,814,782,1006,746,714,682,650,618,586,554,522,980,948,916,884, 852,820,788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794, 1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732, 700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610, 578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938, 874,810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,1000,740,676,612,548] [cluster:debug,2014-08-19T16:49:02.840,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:handle_call:153]handling add_node("10.242.238.90", 8091, undefined, ..) [cluster:debug,2014-08-19T16:49:02.843,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_with_connectivity:505]Posting node info to engage_cluster on {"10.242.238.90",8091}: {struct, [{<<"requestedTargetNodeHostname">>,<<"10.242.238.90">>}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,103212320}, {usagePercent,3}]}, {struct, [{path,<<"/dev/shm">>}, {sizeKBytes,49515824}, {usagePercent,0}]}, {struct, [{path,<<"/boot">>}, {sizeKBytes,198337}, {usagePercent,17}]}, {struct, [{path,<<"/data">>}, {sizeKBytes,329573012}, {usagePercent,1}]}, {struct, [{path,<<"/test">>}, {sizeKBytes,528447160}, {usagePercent,1}]}, {struct, [{path,<<"/var/lib/pgsql">>}, {sizeKBytes,1922866992}, {usagePercent,1}]}]}]}}, {memoryQuota,90112}, {storageTotals, {struct, [{ram, {struct, [{total,101408407552}, {quotaTotal,94489280512}, {quotaUsed,13369344000}, {used,13174808576}, {usedByData,31847576}]}}, {hdd, {struct, [{total,1969015799808}, {quotaTotal,1969015799808}, {used,19690157998}, {usedByData,2736915}, {free,1949325641810}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/var/lib/pgsql">>}, {index_path,<<"/var/lib/pgsql">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct, [{cpu_utilization_rate,0.6265664160401002}, {swap_total,0}, {swap_used,0}, {mem_total,101408407552}, {mem_free,89866596352}]}}, {interestingStats, {struct, [{cmd_get,0.0}, {couch_docs_actual_disk_size,2736915}, {couch_docs_data_size,2729956}, {couch_views_actual_disk_size,0}, {couch_views_data_size,0}, {curr_items,0}, {curr_items_tot,0}, {ep_bg_fetched,0.0}, {get_hits,0.0}, {mem_used,31847576}, {ops,0.0}, {vb_replica_curr_items,0}]}}, {uptime,<<"4088">>}, {memoryTotal,101408407552}, {memoryFree,89866596352}, {mcdMemoryReserved,77368}, {mcdMemoryAllocated,77368}, {couchApiBase,<<"http://10.242.238.88:8092/">>}, {otpCookie,<<"xyzevwdfypcplvpp">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {otpNode,<<"ns_1@10.242.238.88">>}, 
{thisNode,true}, {hostname,<<"10.242.238.88:8091">>}, {clusterCompatibility,131077}, {version,<<"2.5.1-1083-rel-enterprise">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports, {struct, [{httpsMgmt,18091}, {httpsCAPI,18092}, {sslProxy,11214}, {proxy,11211}, {direct,11210}]}}]} [views:debug,2014-08-19T16:49:02.897,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/520. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:02.897,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",520,active,0} [cluster:debug,2014-08-19T16:49:02.977,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_with_connectivity:512]Reply from engage_cluster on {"10.242.238.90",8091}: {ok,{struct,[{<<"availableStorage">>, {struct,[{<<"hdd">>, [{struct,[{<<"path">>,<<"/">>}, {<<"sizeKBytes">>,103212320}, {<<"usagePercent">>,3}]}, {struct,[{<<"path">>,<<"/dev/shm">>}, {<<"sizeKBytes">>,49515824}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"/boot">>}, {<<"sizeKBytes">>,198337}, {<<"usagePercent">>,17}]}, {struct,[{<<"path">>,<<"/data">>}, {<<"sizeKBytes">>,329573012}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/test">>}, {<<"sizeKBytes">>,528447160}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"sizeKBytes">>,1922866992}, {<<"usagePercent">>,1}]}]}]}}, {<<"memoryQuota">>,58026}, {<<"storageTotals">>, {struct,[{<<"ram">>, {struct,[{<<"total">>,101408407552}, {<<"quotaTotal">>,60844670976}, {<<"quotaUsed">>,0}, {<<"used">>,12913496064}, {<<"usedByData">>,0}]}}, {<<"hdd">>, {struct,[{<<"total">>,1969015799808}, {<<"quotaTotal">>,1969015799808}, {<<"used">>,19690157998}, {<<"usedByData">>,0}, {<<"free">>,1949325641810}]}}]}}, {<<"storage">>, {struct,[{<<"ssd">>,[]}, {<<"hdd">>, [{struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"index_path">>,<<"/var/lib/pgsql">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"systemStats">>, {struct,[{<<"cpu_utilization_rate">>,0.2083333333333333}, {<<"swap_total">>,0}, {<<"swap_used">>,0}, {<<"mem_total">>,101408407552}, {<<"mem_free">>,90145210368}]}}, {<<"interestingStats">>,{struct,[]}}, {<<"uptime">>,<<"3753">>}, {<<"memoryTotal">>,101408407552}, {<<"memoryFree">>,90145210368}, {<<"mcdMemoryReserved">>,77368}, {<<"mcdMemoryAllocated">>,77368}, {<<"couchApiBase">>,<<"http://10.242.238.90:8092/">>}, {<<"otpCookie">>,<<"nntvfgasfojamdnn">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"otpNode">>,<<"ns_1@10.242.238.90">>}, {<<"thisNode">>,true}, {<<"hostname">>,<<"10.242.238.90:8091">>}, {<<"clusterCompatibility">>,131077}, {<<"version">>,<<"2.5.1-1083-rel-enterprise">>}, {<<"os">>,<<"x86_64-unknown-linux-gnu">>}, {<<"ports">>, {struct,[{<<"httpsMgmt">>,18091}, {<<"httpsCAPI">>,18092}, {<<"sslProxy">>,11214}, {<<"proxy">>,11211}, {<<"direct">>,11210}]}}]}} [cluster:debug,2014-08-19T16:49:02.979,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:verify_otp_connectivity:578]port_please("ns_1", "10.242.238.90") = 21101 [ns_server:debug,2014-08-19T16:49:02.980,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:02.980,ns_1@10.242.238.88:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted 
[cluster:info,2014-08-19T16:49:02.981,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:node_add_transaction_finish:727]Started node add transaction by adding node 'ns_1@10.242.238.90' to nodes_wanted (group: undefined) [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>}, {name,<<"Group 1">>}, {nodes,['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90']}]] [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:mb_master<0.20995.0>:mb_master:update_peers:506]List of peers has changed from ['ns_1@10.242.238.88','ns_1@10.242.238.89'] to ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([nodes_wanted,server_groups, {node,'ns_1@10.242.238.90',membership}]..) [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',membership} -> inactiveAdded [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90'] [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:02.981,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [cluster:debug,2014-08-19T16:49:02.983,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_engaged_inner:649]Posting the following to complete_join on "10.242.238.90:8091": {struct, [{<<"targetNode">>,'ns_1@10.242.238.90'}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,103212320}, {usagePercent,3}]}, {struct, [{path,<<"/dev/shm">>}, {sizeKBytes,49515824}, {usagePercent,0}]}, {struct, [{path,<<"/boot">>}, {sizeKBytes,198337}, {usagePercent,17}]}, {struct, [{path,<<"/data">>}, {sizeKBytes,329573012}, {usagePercent,1}]}, {struct, [{path,<<"/test">>}, {sizeKBytes,528447160}, {usagePercent,1}]}, {struct, [{path,<<"/var/lib/pgsql">>}, {sizeKBytes,1922866992}, {usagePercent,1}]}]}]}}, {memoryQuota,90112}, {storageTotals, {struct, [{ram, {struct, [{total,101408407552}, {quotaTotal,94489280512}, {quotaUsed,13369344000}, {used,13174808576}, {usedByData,31847576}]}}, {hdd, {struct, [{total,1969015799808}, {quotaTotal,1969015799808}, {used,19690157998}, {usedByData,2736915}, {free,1949325641810}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/var/lib/pgsql">>}, {index_path,<<"/var/lib/pgsql">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct, [{cpu_utilization_rate,0.6265664160401002}, {swap_total,0}, {swap_used,0}, {mem_total,101408407552}, 
{mem_free,89866596352}]}}, {interestingStats, {struct, [{cmd_get,0.0}, {couch_docs_actual_disk_size,2736915}, {couch_docs_data_size,2729956}, {couch_views_actual_disk_size,0}, {couch_views_data_size,0}, {curr_items,0}, {curr_items_tot,0}, {ep_bg_fetched,0.0}, {get_hits,0.0}, {mem_used,31847576}, {ops,0.0}, {vb_replica_curr_items,0}]}}, {uptime,<<"4088">>}, {memoryTotal,101408407552}, {memoryFree,89866596352}, {mcdMemoryReserved,77368}, {mcdMemoryAllocated,77368}, {couchApiBase,<<"http://10.242.238.88:8092/">>}, {otpCookie,<<"xyzevwdfypcplvpp">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {otpNode,<<"ns_1@10.242.238.88">>}, {thisNode,true}, {hostname,<<"10.242.238.88:8091">>}, {clusterCompatibility,131077}, {version,<<"2.5.1-1083-rel-enterprise">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports, {struct, [{httpsMgmt,18091}, {httpsCAPI,18092}, {sslProxy,11214}, {proxy,11211}, {direct,11210}]}}]} [ns_server:debug,2014-08-19T16:49:03.025,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:03.025,ns_1@10.242.238.88:<0.23069.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:03.033,ns_1@10.242.238.88:<0.23069.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:03.056,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 518. Nacking mccouch update. [views:debug,2014-08-19T16:49:03.056,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/518. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.056,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",518,active,0} [ns_server:debug,2014-08-19T16:49:03.056,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,522,980,948,916, 884,852,820,788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826, 794,1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764, 732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642, 610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938, 874,810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,1000,740,676,612,548] [views:debug,2014-08-19T16:49:03.140,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/518. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.140,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",518,active,0} [ns_server:debug,2014-08-19T16:49:03.306,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 516. Nacking mccouch update. [views:debug,2014-08-19T16:49:03.307,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/516. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.307,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",516,active,0} [ns_server:debug,2014-08-19T16:49:03.307,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 516,974,942,910,878,846,814,782,1006,746,714,682,650,618,586,554,522,980,948, 916,884,852,820,788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858, 826,794,1018,758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768, 764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674, 642,610,578,546,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552, 520,1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016, 756,692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576, 938,874,810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,1000,740,676,612,548] [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.88:<0.18065.0>:xdc_rdoc_replication_srv:nodeup_monitoring_loop:46]got nodeup event. Considering rdocs replication [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.88:<0.19195.0>:capi_set_view_manager:nodeup_monitoring_loop:176]got nodeup event. Considering ddocs replication [user:info,2014-08-19T16:49:03.345,ns_1@10.242.238.88:ns_node_disco<0.17920.0>:ns_node_disco:handle_info:159]Node 'ns_1@10.242.238.88' saw that node 'ns_1@10.242.238.90' came up. Tags: [] [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.345,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_rep_events:handle_event:42]Detected a new nodes (['ns_1@10.242.238.90']). Moving config around. 
[ns_server:warn,2014-08-19T16:49:03.345,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:150]Remote server node {xdc_rdoc_replication_srv,'ns_1@10.242.238.90'} process down: noproc [ns_server:info,2014-08-19T16:49:03.345,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'] [views:debug,2014-08-19T16:49:03.390,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/516. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.391,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",516,active,0} [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:03.413,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',memcached} -> [{mccouch_port,11213}, {engines, [{membase, 
[{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", {"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", 
{"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:03.415,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:03.451,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:03.451,ns_1@10.242.238.88:<0.23097.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:03.452,ns_1@10.242.238.88:<0.23097.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:03.553,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [cluster:debug,2014-08-19T16:49:03.555,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_engaged_inner:656]Reply from complete_join on "10.242.238.90:8091": {ok,[]} [cluster:debug,2014-08-19T16:49:03.555,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:handle_call:155]add_node("10.242.238.90", 8091, undefined, ..) -> {ok,'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:03.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 514. Nacking mccouch update. [views:debug,2014-08-19T16:49:03.566,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/514. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",514,active,0} [ns_server:debug,2014-08-19T16:49:03.566,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 516,942,878,814,1006,746,714,682,650,618,586,554,522,980,948,916,884,852,820, 788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018, 758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732,700, 668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610,578, 546,514,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,938, 874,810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,1000,740,676,612,548,974,910,846,782] [views:debug,2014-08-19T16:49:03.650,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/514. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.650,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",514,active,0} [ns_server:debug,2014-08-19T16:49:03.800,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 512. Nacking mccouch update. [views:debug,2014-08-19T16:49:03.800,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/512. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.800,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",512,active,0} [ns_server:debug,2014-08-19T16:49:03.801,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 516,942,878,814,1006,746,714,682,650,618,586,554,522,980,948,916,884,852,820, 788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018, 758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732,700, 668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610,578, 546,514,972,940,908,876,844,812,780,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,512, 938,874,810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,1000,740,676,612,548,974,910,846,782] [views:debug,2014-08-19T16:49:03.858,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/512. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.858,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",512,active,0} [ns_server:debug,2014-08-19T16:49:03.950,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 510. Nacking mccouch update. [views:debug,2014-08-19T16:49:03.950,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/510. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.950,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",510,active,0} [ns_server:debug,2014-08-19T16:49:03.951,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 516,942,878,814,1006,746,714,682,650,618,586,554,522,980,948,916,884,852,820, 788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018, 758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732,700, 668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610,578, 546,514,972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,512, 938,874,810,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,1000,740,676,612,548,974,910,846,782] [views:debug,2014-08-19T16:49:03.984,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/510. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:03.984,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",510,active,0} [ns_server:debug,2014-08-19T16:49:04.076,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 508. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.076,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/508. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.076,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",508,active,0} [ns_server:debug,2014-08-19T16:49:04.076,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 516,942,878,814,1006,746,714,682,650,618,586,554,522,980,948,916,884,852,820, 788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018, 758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732,700, 668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610,578, 546,514,972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,512, 938,874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886, 822,1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,1000,740,676,612,548,974,910,846,782] [views:debug,2014-08-19T16:49:04.110,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/508. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",508,active,0} [ns_server:debug,2014-08-19T16:49:04.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 506. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.218,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/506. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",506,active,0} [ns_server:debug,2014-08-19T16:49:04.219,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 516,942,878,814,1006,746,714,682,650,618,586,554,522,980,948,916,884,852,820, 788,1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018, 758,726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732,700, 668,636,604,572,540,998,966,934,902,870,838,806,774,738,706,674,642,610,578, 546,514,972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,512, 938,874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886, 822,1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782] [views:debug,2014-08-19T16:49:04.277,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/506. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.277,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",506,active,0} [ns_server:debug,2014-08-19T16:49:04.344,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 504. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.344,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/504. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.344,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",504,active,0} [ns_server:debug,2014-08-19T16:49:04.345,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,736,672,608,544,970,906,842,778,710,646,582,518,944,880, 816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828,1020, 760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644,580, 516,942,878,814,1006,746,682,618,554,980,948,916,884,852,820,788,1012,752, 720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018,758,726,694,662, 630,598,566,534,992,960,928,896,864,832,800,768,764,732,700,668,636,604,572, 540,998,966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972, 940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978, 946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692,628, 564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,512,938,874, 810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586,522] [views:debug,2014-08-19T16:49:04.378,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/504. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.378,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",504,active,0} [ns_server:debug,2014-08-19T16:49:04.458,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 502. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.459,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/502. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.459,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",502,active,0} [ns_server:debug,2014-08-19T16:49:04.459,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518,944, 880,816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828, 1020,760,696,632,568,994,930,866,802,734,670,606,542,968,904,840,776,708,644, 580,516,942,878,814,1006,746,682,618,554,980,948,916,884,852,820,788,1012, 752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018,758,726,694, 662,630,598,566,534,992,960,928,896,864,832,800,768,764,732,700,668,636,604, 572,540,998,966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514, 972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520,1023, 978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692, 628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,512,938, 874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586,522] [views:debug,2014-08-19T16:49:04.517,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/502. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.518,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",502,active,0} [ns_server:debug,2014-08-19T16:49:04.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 500. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.609,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/500. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",500,active,0} [ns_server:debug,2014-08-19T16:49:04.610,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518,944, 880,816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828, 1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776,708, 644,580,516,942,878,814,1006,746,682,618,554,980,948,916,884,852,820,788, 1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018,758, 726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,764,732,700,668, 636,604,572,540,998,966,934,902,870,838,806,774,504,738,706,674,642,610,578, 546,514,972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756, 692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576,512, 938,874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886, 822,1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586, 522] [views:debug,2014-08-19T16:49:04.661,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/500. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",500,active,0} [ns_server:debug,2014-08-19T16:49:04.728,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 498. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.728,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/498. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.728,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",498,active,0} [ns_server:debug,2014-08-19T16:49:04.729,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518,944, 880,816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828, 1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776,708, 644,580,516,942,878,814,1006,746,682,618,554,980,948,916,884,852,820,788, 1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018,758, 726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,498,764,732,700, 668,636,604,572,540,998,966,934,902,870,838,806,774,504,738,706,674,642,610, 578,546,514,972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552, 520,1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016, 756,692,628,564,990,926,862,798,730,666,602,538,964,900,836,772,704,640,576, 512,938,874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950, 886,822,1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834,770, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650, 586,522] [views:debug,2014-08-19T16:49:04.762,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/498. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.762,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",498,active,0} [ns_server:debug,2014-08-19T16:49:04.829,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 496. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.829,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/496. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.829,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",496,active,0} [ns_server:debug,2014-08-19T16:49:04.830,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518,944, 880,816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828, 1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776,708, 644,580,516,942,878,814,1006,746,682,618,554,980,948,916,884,852,820,788, 1012,752,720,688,656,624,592,560,528,986,954,922,890,858,826,794,1018,758, 726,694,662,630,598,566,534,992,960,928,896,864,832,800,768,498,764,732,700, 668,636,604,572,540,998,966,934,902,870,838,806,774,504,738,706,674,642,610, 578,546,514,972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552, 520,1023,978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016, 756,692,628,564,990,926,862,798,496,730,666,602,538,964,900,836,772,704,640, 576,512,938,874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524, 950,886,822,1014,754,690,626,562,988,924,860,796,728,664,600,536,962,898,834, 770,766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714, 650,586,522] [views:debug,2014-08-19T16:49:04.863,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/496. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.863,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",496,active,0} [ns_server:debug,2014-08-19T16:49:04.944,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 494. Nacking mccouch update. [views:debug,2014-08-19T16:49:04.944,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/494. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:04.945,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",494,active,0} [ns_server:debug,2014-08-19T16:49:04.945,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518,944, 880,816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828, 1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776,708, 644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,752,720,688,656, 624,592,560,528,986,954,922,890,858,826,794,1018,758,726,694,662,630,598,566, 534,992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540,998, 966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940,908, 876,844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978,946,914, 882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990, 926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822,1014, 754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586,522, 948,884,820,1012] [views:debug,2014-08-19T16:49:05.003,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/494. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.004,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",494,active,0} [ns_server:debug,2014-08-19T16:49:05.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 492. Nacking mccouch update. [views:debug,2014-08-19T16:49:05.170,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/492. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",492,active,0} [ns_server:debug,2014-08-19T16:49:05.171,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,724,660,596,532,958,894,830,1022,762,698,634, 570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518,944, 880,816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828, 1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776,708, 644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,752,720,688,656, 624,592,560,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630,598, 566,534,992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540, 998,966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940, 908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978,946, 914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692,628,564, 990,926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874, 810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586, 522,948,884,820,1012] [views:debug,2014-08-19T16:49:05.231,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/492. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.231,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",492,active,0} [ns_server:debug,2014-08-19T16:49:05.397,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 490. Nacking mccouch update. [views:debug,2014-08-19T16:49:05.397,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/490. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.397,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",490,active,0} [ns_server:debug,2014-08-19T16:49:05.398,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518, 944,880,816,1008,748,684,620,556,982,918,854,790,722,658,594,530,956,892,828, 1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776,708, 644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,752,720,688,656, 624,592,560,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630,598, 566,534,992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540, 998,966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940, 908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978,946, 914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692,628,564, 990,926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874, 810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586, 522,948,884,820,1012] [views:debug,2014-08-19T16:49:05.482,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/490. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.482,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",490,active,0} [ns_server:debug,2014-08-19T16:49:05.598,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 488. Nacking mccouch update. [views:debug,2014-08-19T16:49:05.598,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/488. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.598,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",488,active,0} [ns_server:debug,2014-08-19T16:49:05.599,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518, 944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956,892, 828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776, 708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,752,720,688, 656,624,592,560,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630, 598,566,534,992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572, 540,998,966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972, 940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978, 946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692,628, 564,990,926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938, 874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586, 522,948,884,820,1012] [views:debug,2014-08-19T16:49:05.657,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/488. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.657,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",488,active,0} [ns_server:debug,2014-08-19T16:49:05.749,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 486. Nacking mccouch update. [views:debug,2014-08-19T16:49:05.749,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/486. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.749,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",486,active,0} [ns_server:debug,2014-08-19T16:49:05.750,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518, 944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956,892, 828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776, 708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486,752,720, 688,656,624,592,560,528,986,954,922,890,858,826,794,492,1018,758,726,694,662, 630,598,566,534,992,960,928,896,864,832,800,768,498,764,732,700,668,636,604, 572,540,998,966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514, 972,940,908,876,844,812,780,510,1004,744,712,680,648,616,584,552,520,1023, 978,946,914,882,850,818,786,1010,718,654,590,526,952,888,824,1016,756,692, 628,564,990,926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512, 938,874,810,508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886, 822,1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650, 586,522,948,884,820,1012] [views:debug,2014-08-19T16:49:05.783,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/486. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.783,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",486,active,0} [ns_server:debug,2014-08-19T16:49:05.938,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 484. Nacking mccouch update. [views:debug,2014-08-19T16:49:05.938,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/484. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:05.939,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",484,active,0} [ns_server:debug,2014-08-19T16:49:05.939,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518, 944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956,892, 828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776, 708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486,720,656, 592,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630,598,566,534, 992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966, 934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940,908,876, 844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882, 850,818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990, 926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,716,652,588,524,950,886,822,1014, 754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586,522, 948,884,820,1012,752,688,624,560] [views:debug,2014-08-19T16:49:06.006,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/484. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.006,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",484,active,0} [ns_server:debug,2014-08-19T16:49:06.072,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 482. Nacking mccouch update. [views:debug,2014-08-19T16:49:06.073,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/482. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.073,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",482,active,0} [ns_server:debug,2014-08-19T16:49:06.073,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518, 944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956,892, 828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776, 708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486,720,656, 592,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630,598,566,534, 992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966, 934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940,908,876, 844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882, 850,818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990, 926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,714,650,586, 522,948,884,820,1012,752,688,624,560] [views:debug,2014-08-19T16:49:06.106,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/482. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.107,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",482,active,0} [ns_server:debug,2014-08-19T16:49:06.198,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 480. Nacking mccouch update. [views:debug,2014-08-19T16:49:06.198,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/480. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.198,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",480,active,0} [ns_server:debug,2014-08-19T16:49:06.199,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518, 944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956,892, 828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776, 708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486,720,656, 592,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630,598,566,534, 992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966, 934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940,908,876, 844,812,780,510,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882, 850,818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990, 926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650, 586,522,948,884,820,1012,752,688,624,560] [views:debug,2014-08-19T16:49:06.257,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/480. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.258,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",480,active,0} [ns_server:debug,2014-08-19T16:49:06.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 478. Nacking mccouch update. [views:debug,2014-08-19T16:49:06.349,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/478. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",478,active,0} [ns_server:debug,2014-08-19T16:49:06.350,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,710,646,582,518, 944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956,892, 828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840,776, 708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486,720,656, 592,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630,598,566,534, 992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966, 934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940,908,876, 844,812,780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914, 882,850,818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564, 990,926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874, 810,508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650, 586,522,948,884,820,1012,752,688,624,560] [views:debug,2014-08-19T16:49:06.408,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/478. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.408,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",478,active,0} [ns_server:debug,2014-08-19T16:49:06.516,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 476. Nacking mccouch update. [views:debug,2014-08-19T16:49:06.516,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/476. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.516,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",476,active,0} [ns_server:debug,2014-08-19T16:49:06.517,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646,582, 518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956, 892,828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840, 776,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486,720, 656,592,528,986,954,922,890,858,826,794,492,1018,758,726,694,662,630,598,566, 534,992,960,928,896,864,832,800,768,498,764,732,700,668,636,604,572,540,998, 966,934,902,870,838,806,774,504,738,706,674,642,610,578,546,514,972,940,908, 876,844,812,780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946, 914,882,850,818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628, 564,990,926,862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938, 874,810,508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886, 822,1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714, 650,586,522,948,884,820,1012,752,688,624,560] [views:debug,2014-08-19T16:49:06.550,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/476. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.550,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",476,active,0} [ns_server:debug,2014-08-19T16:49:06.642,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 474. Nacking mccouch update. [views:debug,2014-08-19T16:49:06.642,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/474. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.642,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",474,active,0} [ns_server:debug,2014-08-19T16:49:06.643,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646,582, 518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956, 892,828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840, 776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486, 720,656,592,528,954,890,826,1018,758,726,694,662,630,598,566,534,992,960,928, 896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966,934,902,870, 838,806,774,504,738,706,674,642,610,578,546,514,972,940,908,876,844,812,780, 510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818, 786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990,926,862, 798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874,810,508,1002, 742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,1014,754,690, 626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766,702,638,574, 936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948, 884,820,1012,752,688,624,560,986,922,858,794,492] [views:debug,2014-08-19T16:49:06.676,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/474. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.676,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",474,active,0} [ns_server:debug,2014-08-19T16:49:06.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 472. Nacking mccouch update. [views:debug,2014-08-19T16:49:06.834,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/472. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",472,active,0} [ns_server:debug,2014-08-19T16:49:06.835,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646,582, 518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956, 892,828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840, 776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486, 720,656,592,528,954,890,826,1018,758,726,694,662,630,598,566,534,992,960,928, 896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966,934,902,870, 838,806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812, 780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850, 818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990,926, 862,798,496,730,666,602,538,964,900,836,772,704,640,576,512,938,874,810,508, 1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,1014, 754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766,702, 638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650,586, 522,948,884,820,1012,752,688,624,560,986,922,858,794,492] [views:debug,2014-08-19T16:49:06.901,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/472. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:06.902,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",472,active,0} [ns_server:debug,2014-08-19T16:49:07.043,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 470. Nacking mccouch update. [views:debug,2014-08-19T16:49:07.044,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/470. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.044,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",470,active,0} [ns_server:debug,2014-08-19T16:49:07.044,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646,582, 518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956, 892,828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840, 776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486, 720,656,592,528,954,890,826,1018,758,726,694,662,630,598,566,534,992,960,928, 896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966,934,902,870, 838,806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812, 780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850, 818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990,926, 862,798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650, 586,522,948,884,820,1012,752,688,624,560,986,922,858,794,492] [views:debug,2014-08-19T16:49:07.111,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/470. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.111,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",470,active,0} [ns_server:debug,2014-08-19T16:49:07.253,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 468. Nacking mccouch update. [views:debug,2014-08-19T16:49:07.253,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/468. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.253,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",468,active,0} [ns_server:debug,2014-08-19T16:49:07.253,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646,582, 518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956, 892,828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840, 776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486, 720,656,592,528,954,890,826,1018,758,726,694,662,630,598,566,534,992,960,928, 896,864,832,800,768,498,764,732,700,668,636,604,572,540,998,966,934,902,870, 838,806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812, 780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850, 818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990,926, 862,798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714, 650,586,522,948,884,820,1012,752,688,624,560,986,922,858,794,492] [views:debug,2014-08-19T16:49:07.317,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/468. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.317,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",468,active,0} [ns_server:debug,2014-08-19T16:49:07.417,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 466. Nacking mccouch update. [views:debug,2014-08-19T16:49:07.417,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/466. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.417,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",466,active,0} [ns_server:debug,2014-08-19T16:49:07.418,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,1022,762,698, 634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646,582, 518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530,956, 892,828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904,840, 776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788,486, 720,656,592,528,954,890,826,1018,758,726,694,662,630,598,566,534,992,960,928, 896,864,832,800,768,498,466,764,732,700,668,636,604,572,540,998,966,934,902, 870,838,806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844, 812,780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882, 850,818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990, 926,862,798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874, 810,508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714, 650,586,522,948,884,820,1012,752,688,624,560,986,922,858,794,492] [views:debug,2014-08-19T16:49:07.476,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/466. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.476,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",466,active,0} [ns_server:debug,2014-08-19T16:49:07.626,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 464. Nacking mccouch update. [views:debug,2014-08-19T16:49:07.626,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/464. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.626,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",464,active,0} [ns_server:debug,2014-08-19T16:49:07.627,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530, 956,892,828,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968,904, 840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852,788, 486,720,656,592,528,954,890,826,1018,758,694,630,566,992,960,928,896,864,832, 800,768,498,466,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806, 774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510, 478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786, 484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990,926,862,798, 496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,1002, 742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,1014,754,690, 626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638, 574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650,586,522, 948,884,820,1012,752,688,624,560,986,922,858,794,492,726,662,598,534] [views:debug,2014-08-19T16:49:07.685,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/464. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.685,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",464,active,0} [ns_server:debug,2014-08-19T16:49:07.768,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 462. Nacking mccouch update. [views:debug,2014-08-19T16:49:07.769,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/462. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.769,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",462,active,0} [ns_server:debug,2014-08-19T16:49:07.769,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530, 956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968, 904,840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852, 788,486,720,656,592,528,954,890,826,1018,758,694,630,566,992,960,928,896,864, 832,800,768,498,466,764,732,700,668,636,604,572,540,998,966,934,902,870,838, 806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812,780, 510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818, 786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990,926,862, 798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508, 1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,1014, 754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468,766, 702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650, 586,522,948,884,820,1012,752,688,624,560,986,922,858,794,492,726,662,598,534] [views:debug,2014-08-19T16:49:07.819,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/462. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.819,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",462,active,0} [ns_server:debug,2014-08-19T16:49:07.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 460. Nacking mccouch update. [views:debug,2014-08-19T16:49:07.904,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/460. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",460,active,0} [ns_server:debug,2014-08-19T16:49:07.905,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530, 956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968, 904,840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852, 788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,960,928,896, 864,832,800,768,498,466,764,732,700,668,636,604,572,540,998,966,934,902,870, 838,806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812, 780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850, 818,786,484,1010,718,654,590,526,952,888,824,1016,756,692,628,564,990,926, 862,798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714, 650,586,522,948,884,820,1012,752,688,624,560,986,922,858,794,492,726,662,598, 534] [views:debug,2014-08-19T16:49:07.955,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/460. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:07.955,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",460,active,0} [ns_server:debug,2014-08-19T16:49:08.070,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 458. Nacking mccouch update. [views:debug,2014-08-19T16:49:08.070,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/458. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.070,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",458,active,0} [ns_server:debug,2014-08-19T16:49:08.071,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530, 956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968, 904,840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852, 788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,960,928,896, 864,832,800,768,498,466,764,732,700,668,636,604,572,540,998,966,934,902,870, 838,806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812, 780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850, 818,786,484,1010,718,654,590,526,952,888,824,458,1016,756,692,628,564,990, 926,862,798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874, 810,508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714, 650,586,522,948,884,820,1012,752,688,624,560,986,922,858,794,492,726,662,598, 534] [views:debug,2014-08-19T16:49:08.138,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/458. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.138,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",458,active,0} [ns_server:debug,2014-08-19T16:49:08.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 456. Nacking mccouch update. [views:debug,2014-08-19T16:49:08.287,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/456. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",456,active,0} [ns_server:debug,2014-08-19T16:49:08.288,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530, 956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968, 904,840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852, 788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,960,928,896, 864,832,800,768,498,466,764,732,700,668,636,604,572,540,998,966,934,902,870, 838,806,774,504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812, 780,510,478,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850, 818,786,484,1010,718,654,590,526,952,888,824,458,1016,756,692,628,564,990, 926,862,798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874, 810,508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822, 456,1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770, 468,766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480, 714,650,586,522,948,884,820,1012,752,688,624,560,986,922,858,794,492,726,662, 598,534] [views:debug,2014-08-19T16:49:08.338,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/456. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.339,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",456,active,0} [ns_server:debug,2014-08-19T16:49:08.489,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 454. Nacking mccouch update. [views:debug,2014-08-19T16:49:08.489,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/454. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.489,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",454,active,0} [ns_server:debug,2014-08-19T16:49:08.490,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530, 956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968, 904,840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852, 788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928,864,800, 498,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,504,472, 738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,1004,744, 712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484,1010,718, 654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,730,666, 602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,1002,742,678,614, 550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562, 988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638,574,936, 872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948,884, 820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534,960,896,832, 768,466] [views:debug,2014-08-19T16:49:08.574,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/454. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.574,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",454,active,0} [ns_server:debug,2014-08-19T16:49:08.748,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 452. Nacking mccouch update. [views:debug,2014-08-19T16:49:08.748,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/452. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.749,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",452,active,0} [ns_server:debug,2014-08-19T16:49:08.749,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,1008,748,684,620,556,982,918,854,790,488,722,658,594,530, 956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542,968, 904,840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916,852, 788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928,864,800, 498,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,504,472, 738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,1004,744, 712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010, 718,654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,730, 666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,1002,742,678, 614,550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626, 562,988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638,574, 936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534,960,896, 832,768,466] [views:debug,2014-08-19T16:49:08.833,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/452. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:08.833,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",452,active,0} [ns_server:debug,2014-08-19T16:49:09.007,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 450. Nacking mccouch update. [views:debug,2014-08-19T16:49:09.008,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/450. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.008,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",450,active,0} [ns_server:debug,2014-08-19T16:49:09.008,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658,594, 530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542, 968,904,840,776,474,708,644,580,516,942,878,814,1006,746,682,618,554,980,916, 852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928,864, 800,498,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774,504, 472,738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,1004, 744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484,452, 1010,718,654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798, 496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,1002, 742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754, 690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702, 638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650,586, 522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534, 960,896,832,768,466] [views:debug,2014-08-19T16:49:09.055,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/450. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.055,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",450,active,0} [ns_server:debug,2014-08-19T16:49:09.147,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 448. Nacking mccouch update. [views:debug,2014-08-19T16:49:09.147,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/448. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.147,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",448,active,0} [ns_server:debug,2014-08-19T16:49:09.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658,594, 530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542, 968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618,554,980, 916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928, 864,800,498,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774, 504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478, 1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484, 452,1010,718,654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862, 798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508, 1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,456, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714, 650,586,522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662, 598,534,960,896,832,768,466] [views:debug,2014-08-19T16:49:09.206,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/448. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",448,active,0} [ns_server:debug,2014-08-19T16:49:09.348,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 446. Nacking mccouch update. [views:debug,2014-08-19T16:49:09.348,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/446. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.348,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",446,active,0} [ns_server:debug,2014-08-19T16:49:09.348,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658,594, 530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542, 968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618,554,980, 916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928, 864,800,498,764,732,700,668,636,604,572,540,998,966,934,902,870,838,806,774, 504,472,738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478, 446,1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786, 484,452,1010,718,654,590,526,952,888,824,458,1016,756,692,628,564,990,926, 862,798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810, 508,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,456, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714, 650,586,522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662, 598,534,960,896,832,768,466] [views:debug,2014-08-19T16:49:09.406,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/446. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.407,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",446,active,0} [ns_server:debug,2014-08-19T16:49:09.548,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 444. Nacking mccouch update. [views:debug,2014-08-19T16:49:09.548,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/444. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",444,active,0} [ns_server:debug,2014-08-19T16:49:09.549,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658,594, 530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542, 968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618,554,980, 916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928, 864,800,498,732,668,604,540,998,966,934,902,870,838,806,774,504,472,738,706, 674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744,712, 680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718, 654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,730,666, 602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678, 614,550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626, 562,988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638,574, 936,872,808,506,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534,960,896, 832,768,466,764,700,636,572] [views:debug,2014-08-19T16:49:09.632,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/444. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.632,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",444,active,0} [ns_server:debug,2014-08-19T16:49:09.774,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 442. Nacking mccouch update. [views:debug,2014-08-19T16:49:09.774,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/442. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.774,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",442,active,0} [ns_server:debug,2014-08-19T16:49:09.775,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658,594, 530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542, 968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618,554,980, 916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928, 864,800,498,732,668,604,540,998,966,934,902,870,838,806,774,504,472,738,706, 674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744,712, 680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718, 654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,730,666, 602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678, 614,550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626, 562,988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638,574, 936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522, 948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534,960, 896,832,768,466,764,700,636,572] [views:debug,2014-08-19T16:49:09.833,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/442. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:09.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",442,active,0} [ns_server:debug,2014-08-19T16:49:10.000,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 440. Nacking mccouch update. [views:debug,2014-08-19T16:49:10.000,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/440. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.000,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",440,active,0} [ns_server:debug,2014-08-19T16:49:10.001,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,736,672,608,544,970,906,842,778,476,710,646, 582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658,594, 530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606,542, 968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618,554,980, 916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992,928, 864,800,498,732,668,604,540,998,966,934,902,870,838,806,774,504,472,440,738, 706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744, 712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010, 718,654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,730, 666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742, 678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690, 626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638, 574,936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586, 522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534, 960,896,832,768,466,764,700,636,572] [views:debug,2014-08-19T16:49:10.059,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/440. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.059,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",440,active,0} [ns_server:debug,2014-08-19T16:49:10.234,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 438. Nacking mccouch update. [views:debug,2014-08-19T16:49:10.234,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/438. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.234,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",438,active,0} [ns_server:debug,2014-08-19T16:49:10.235,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,710, 646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658, 594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,734,670,606, 542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618,554, 980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566,992, 928,864,800,498,732,668,604,540,998,966,934,902,870,838,806,774,504,472,440, 738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004, 744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484,452, 1010,718,654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798, 496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,444, 1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,456, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480, 714,650,586,522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726, 662,598,534,960,896,832,768,466,764,700,636,572] [views:debug,2014-08-19T16:49:10.318,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/438. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.318,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",438,active,0} [ns_server:debug,2014-08-19T16:49:10.493,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 436. Nacking mccouch update. [views:debug,2014-08-19T16:49:10.493,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/436. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.493,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",436,active,0} [ns_server:debug,2014-08-19T16:49:10.494,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,710, 646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658, 594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436,734,670, 606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618, 554,980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566, 992,928,864,800,498,732,668,604,540,998,966,934,902,870,838,806,774,504,472, 440,738,706,674,642,610,578,546,514,972,940,908,876,844,812,780,510,478,446, 1004,744,712,680,648,616,584,552,520,1023,978,946,914,882,850,818,786,484, 452,1010,718,654,590,526,952,888,824,458,1016,756,692,628,564,990,926,862, 798,496,730,666,602,538,964,900,836,772,470,704,640,576,512,938,874,810,508, 444,1002,742,678,614,550,976,912,848,784,482,716,652,588,524,950,886,822,456, 1014,754,690,626,562,988,924,860,796,494,728,664,600,536,962,898,834,770,468, 766,702,638,574,936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480, 714,650,586,522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726, 662,598,534,960,896,832,768,466,764,700,636,572] [views:debug,2014-08-19T16:49:10.577,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/436. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.577,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",436,active,0} [ns_server:debug,2014-08-19T16:49:10.752,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 434. Nacking mccouch update. [views:debug,2014-08-19T16:49:10.752,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/434. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",434,active,0} [ns_server:debug,2014-08-19T16:49:10.753,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,710, 646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658, 594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436,734,670, 606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618, 554,980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566, 992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,738,706,674,642, 610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648, 616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718,654,590, 526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,730,666,602,538, 964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614,550, 976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562,988, 924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638,574,936,872, 808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948,884, 820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534,960,896,832, 768,466,764,700,636,572,998,934,870,806,504,440] [views:debug,2014-08-19T16:49:10.812,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/434. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.812,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",434,active,0} [ns_server:debug,2014-08-19T16:49:10.975,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 432. Nacking mccouch update. [views:debug,2014-08-19T16:49:10.975,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/432. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:10.975,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",432,active,0} [ns_server:debug,2014-08-19T16:49:10.975,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,710, 646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658, 594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436,734,670, 606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618, 554,980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566, 992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,738,706,674,642, 610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648, 616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718,654,590, 526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602, 538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614, 550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562, 988,924,860,796,494,728,664,600,536,962,898,834,770,468,766,702,638,574,936, 872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534,960,896, 832,768,466,764,700,636,572,998,934,870,806,504,440] [views:debug,2014-08-19T16:49:11.034,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/432. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.034,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",432,active,0} [ns_server:debug,2014-08-19T16:49:11.100,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 430. Nacking mccouch update. [views:debug,2014-08-19T16:49:11.101,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/430. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.101,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",430,active,0} [ns_server:debug,2014-08-19T16:49:11.101,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,710, 646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658, 594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436,734,670, 606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618, 554,980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566, 992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,738,706,674,642, 610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648, 616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718,654,590, 526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602, 538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614, 550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562, 988,924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574, 936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522, 948,884,820,454,1012,752,688,624,560,986,922,858,794,492,726,662,598,534,960, 896,832,768,466,764,700,636,572,998,934,870,806,504,440] [views:debug,2014-08-19T16:49:11.135,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/430. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.135,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",430,active,0} [ns_server:debug,2014-08-19T16:49:11.226,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 428. Nacking mccouch update. [views:debug,2014-08-19T16:49:11.226,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/428. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.226,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",428,active,0} [ns_server:debug,2014-08-19T16:49:11.227,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,724,660,596,532,958,894,830,464,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,710, 646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722,658, 594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436,734,670, 606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682,618, 554,980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630,566, 992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,738,706,674,642, 610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648, 616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718,654,590, 526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602, 538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614, 550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562, 988,924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574, 936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522, 948,884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534, 960,896,832,768,466,764,700,636,572,998,934,870,806,504,440] [views:debug,2014-08-19T16:49:11.285,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/428. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.285,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",428,active,0} [ns_server:debug,2014-08-19T16:49:11.385,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 426. Nacking mccouch update. [views:debug,2014-08-19T16:49:11.385,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/426. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.385,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",426,active,0} [ns_server:debug,2014-08-19T16:49:11.386,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,722, 658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436,734, 670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746,682, 618,554,980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694,630, 566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,738,706,674, 642,610,578,546,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680, 648,616,584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718,654, 590,526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666, 602,538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678, 614,550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626, 562,988,924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638, 574,936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586, 522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598, 534,960,896,832,768,466,764,700,636,572,998,934,870,806,504,440] [views:debug,2014-08-19T16:49:11.445,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/426. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.445,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",426,active,0} [ns_server:debug,2014-08-19T16:49:11.528,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 424. Nacking mccouch update. [views:debug,2014-08-19T16:49:11.528,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/424. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.528,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",424,active,0} [ns_server:debug,2014-08-19T16:49:11.529,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,424, 722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436, 734,670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746, 682,618,554,980,916,852,788,486,720,656,592,528,954,890,826,460,1018,758,694, 630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,706,642, 578,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648,616,584, 552,520,1023,978,946,914,882,850,818,786,484,452,1010,718,654,590,526,952, 888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964, 900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976, 912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924, 860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574,936,872, 808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948,884, 820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896, 832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546] [views:debug,2014-08-19T16:49:11.603,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/424. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.604,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",424,active,0} [ns_server:debug,2014-08-19T16:49:11.695,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 422. Nacking mccouch update. [views:debug,2014-08-19T16:49:11.695,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/422. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.695,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",422,active,0} [ns_server:debug,2014-08-19T16:49:11.696,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,424, 722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436, 734,670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746, 682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460,1018,758, 694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,706, 642,578,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648,616, 584,552,520,1023,978,946,914,882,850,818,786,484,452,1010,718,654,590,526, 952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538, 964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614,550, 976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562,988, 924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574,936, 872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960, 896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546] [views:debug,2014-08-19T16:49:11.729,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/422. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.729,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",422,active,0} [ns_server:debug,2014-08-19T16:49:11.838,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 420. Nacking mccouch update. [views:debug,2014-08-19T16:49:11.838,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/420. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.838,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",420,active,0} [ns_server:debug,2014-08-19T16:49:11.839,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,424, 722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436, 734,670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746, 682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460,1018,758, 694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,706, 642,578,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648,616, 584,552,520,1023,978,946,914,882,850,818,786,484,452,420,1010,718,654,590, 526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602, 538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614, 550,976,912,848,784,482,716,652,588,524,950,886,822,456,1014,754,690,626,562, 988,924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574, 936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586,522, 948,884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534, 960,896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546] [views:debug,2014-08-19T16:49:11.896,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/420. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:11.896,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",420,active,0} [ns_server:debug,2014-08-19T16:49:12.038,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 418. Nacking mccouch update. [views:debug,2014-08-19T16:49:12.038,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/418. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.039,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",418,active,0} [ns_server:debug,2014-08-19T16:49:12.039,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,424, 722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436, 734,670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746, 682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460,1018,758, 694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,706, 642,578,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648,616, 584,552,520,1023,978,946,914,882,850,818,786,484,452,420,1010,718,654,590, 526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602, 538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614, 550,976,912,848,784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626, 562,988,924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638, 574,936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,714,650,586, 522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598, 534,960,896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610, 546] [views:debug,2014-08-19T16:49:12.089,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/418. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.089,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",418,active,0} [ns_server:debug,2014-08-19T16:49:12.240,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 416. Nacking mccouch update. [views:debug,2014-08-19T16:49:12.240,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/416. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.241,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",416,active,0} [ns_server:debug,2014-08-19T16:49:12.241,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,424, 722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436, 734,670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746, 682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460,1018,758, 694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,706, 642,578,514,972,940,908,876,844,812,780,510,478,446,1004,744,712,680,648,616, 584,552,520,1023,978,946,914,882,850,818,786,484,452,420,1010,718,654,590, 526,952,888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602, 538,964,900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614, 550,976,912,848,784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626, 562,988,924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638, 574,936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650, 586,522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662, 598,534,960,896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674, 610,546] [views:debug,2014-08-19T16:49:12.299,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/416. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.300,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",416,active,0} [ns_server:debug,2014-08-19T16:49:12.436,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 414. Nacking mccouch update. [views:debug,2014-08-19T16:49:12.436,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/414. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.437,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",414,active,0} [ns_server:debug,2014-08-19T16:49:12.437,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488,424, 722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500,436, 734,670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006,746, 682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460,1018,758, 694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472,706, 642,578,514,940,876,812,510,446,1004,744,712,680,648,616,584,552,520,1023, 978,946,914,882,850,818,786,484,452,420,1010,718,654,590,526,952,888,824,458, 1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772, 470,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784, 482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924,860,796, 494,430,728,664,600,536,962,898,834,770,468,766,702,638,574,936,872,808,506, 442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820, 454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832, 768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908,844, 780,478,414] [views:debug,2014-08-19T16:49:12.488,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/414. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.489,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",414,active,0} [ns_server:debug,2014-08-19T16:49:12.555,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 412. Nacking mccouch update. [views:debug,2014-08-19T16:49:12.555,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/412. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.556,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",412,active,0} [ns_server:debug,2014-08-19T16:49:12.556,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500, 436,734,670,606,542,968,904,840,776,474,708,644,580,516,942,878,814,448,1006, 746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460,1018, 758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774,472, 706,642,578,514,940,876,812,510,446,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,484,452,420,1010,718,654,590,526,952,888, 824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964,900, 836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912, 848,784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924, 860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574,936,872, 808,506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960, 896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972, 908,844,780,478,414] [views:debug,2014-08-19T16:49:12.589,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/412. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",412,active,0} [ns_server:debug,2014-08-19T16:49:12.656,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 410. Nacking mccouch update. [views:debug,2014-08-19T16:49:12.656,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/410. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.656,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",410,active,0} [ns_server:debug,2014-08-19T16:49:12.657,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500, 436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460, 1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774, 472,706,642,578,514,940,876,812,510,446,1004,744,712,680,648,616,584,552,520, 1023,978,946,914,882,850,818,786,484,452,420,1010,718,654,590,526,952,888, 824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964,900, 836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912, 848,784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924, 860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574,936,872, 808,506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960, 896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972, 908,844,780,478,414] [views:debug,2014-08-19T16:49:12.690,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/410. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.690,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",410,active,0} [ns_server:debug,2014-08-19T16:49:12.865,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 408. Nacking mccouch update. [views:debug,2014-08-19T16:49:12.865,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/408. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.865,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",408,active,0} [ns_server:debug,2014-08-19T16:49:12.866,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500, 436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460, 1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774, 472,408,706,642,578,514,940,876,812,510,446,1004,744,712,680,648,616,584,552, 520,1023,978,946,914,882,850,818,786,484,452,420,1010,718,654,590,526,952, 888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964, 900,836,772,470,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976, 912,848,784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988, 924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574,936, 872,808,506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522, 948,884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534, 960,896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546, 972,908,844,780,478,414] [views:debug,2014-08-19T16:49:12.949,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/408. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:12.949,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",408,active,0} [ns_server:debug,2014-08-19T16:49:13.124,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 406. Nacking mccouch update. [views:debug,2014-08-19T16:49:13.124,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/406. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.125,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",406,active,0} [ns_server:debug,2014-08-19T16:49:13.125,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500, 436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460, 1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774, 472,408,706,642,578,514,940,876,812,510,446,1004,744,712,680,648,616,584,552, 520,1023,978,946,914,882,850,818,786,484,452,420,1010,718,654,590,526,952, 888,824,458,1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964, 900,836,772,470,406,704,640,576,512,938,874,810,508,444,1002,742,678,614,550, 976,912,848,784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562, 988,924,860,796,494,430,728,664,600,536,962,898,834,770,468,766,702,638,574, 936,872,808,506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586, 522,948,884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598, 534,960,896,832,768,466,764,700,636,572,998,934,870,806,504,440,738,674,610, 546,972,908,844,780,478,414] [views:debug,2014-08-19T16:49:13.200,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/406. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.200,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",406,active,0} [ns_server:debug,2014-08-19T16:49:13.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 404. Nacking mccouch update. [views:debug,2014-08-19T16:49:13.267,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/404. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",404,active,0} [ns_server:debug,2014-08-19T16:49:13.268,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500, 436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460, 1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774, 472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552,978,946,914, 882,850,818,786,484,452,420,1010,718,654,590,526,952,888,824,458,1016,756, 692,628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406, 704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482, 418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924,860,796,494, 430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506, 442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820, 454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832, 768,466,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908,844, 780,478,414,712,648,584,520,1023] [views:debug,2014-08-19T16:49:13.301,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/404. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.301,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",404,active,0} [ns_server:debug,2014-08-19T16:49:13.376,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 402. Nacking mccouch update. [views:debug,2014-08-19T16:49:13.376,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/402. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.376,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",402,active,0} [ns_server:debug,2014-08-19T16:49:13.377,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,1022, 762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476, 412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802,500, 436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460, 1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838,774, 472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552,978,946,914, 882,850,818,786,484,452,420,1010,718,654,590,526,952,888,824,458,1016,756, 692,628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406, 704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482, 418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924,860,796,494, 430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506, 442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820, 454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832, 768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908, 844,780,478,414,712,648,584,520,1023] [views:debug,2014-08-19T16:49:13.410,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/402. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.411,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",402,active,0} [ns_server:debug,2014-08-19T16:49:13.493,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 400. Nacking mccouch update. [views:debug,2014-08-19T16:49:13.493,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/400. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.494,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",400,active,0} [ns_server:debug,2014-08-19T16:49:13.494,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790, 488,424,722,658,594,530,956,892,828,462,1020,760,696,632,568,994,930,866,802, 500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814, 448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826, 460,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838, 774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552,978,946, 914,882,850,818,786,484,452,420,1010,718,654,590,526,952,888,824,458,1016, 756,692,628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470, 406,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784, 482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924,860,796, 494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808, 506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884, 820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896, 832,768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972, 908,844,780,478,414,712,648,584,520,1023] [views:debug,2014-08-19T16:49:13.553,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/400. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.553,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",400,active,0} [ns_server:debug,2014-08-19T16:49:13.727,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 398. Nacking mccouch update. [views:debug,2014-08-19T16:49:13.727,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/398. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.728,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",398,active,0} [ns_server:debug,2014-08-19T16:49:13.728,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790, 488,424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866, 802,500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878, 814,448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902, 838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552,978, 946,914,882,850,818,786,484,452,420,1010,718,654,590,526,952,888,824,458, 1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772, 470,406,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848, 784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924,860, 796,494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872, 808,506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960, 896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546, 972,908,844,780,478,414,712,648,584,520,1023] [views:debug,2014-08-19T16:49:13.812,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/398. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.812,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",398,active,0} [ns_server:debug,2014-08-19T16:49:13.941,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 396. Nacking mccouch update. [views:debug,2014-08-19T16:49:13.941,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/396. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:13.942,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",396,active,0} [ns_server:debug,2014-08-19T16:49:13.942,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790, 488,424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866, 802,500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878, 814,448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552, 978,946,914,882,850,818,786,484,452,420,1010,718,654,590,526,952,888,824,458, 1016,756,692,628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772, 470,406,704,640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848, 784,482,418,716,652,588,524,950,886,822,456,1014,754,690,626,562,988,924,860, 796,494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872, 808,506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960, 896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546, 972,908,844,780,478,414,712,648,584,520,1023] [views:debug,2014-08-19T16:49:14.026,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/396. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.026,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",396,active,0} [ns_server:debug,2014-08-19T16:49:14.200,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 394. Nacking mccouch update. [views:debug,2014-08-19T16:49:14.201,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/394. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.201,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",394,active,0} [ns_server:debug,2014-08-19T16:49:14.201,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790, 488,424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866, 802,500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878, 814,448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552, 978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704,640, 576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482,418,716, 652,588,524,950,886,822,456,1014,754,690,626,562,988,924,860,796,494,430,728, 664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506,442,1000, 740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820,454,1012, 752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832,768,466, 402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908,844,780, 478,414,712,648,584,520,1023,946,882,818,452,1010] [views:debug,2014-08-19T16:49:14.259,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/394. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.259,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",394,active,0} [ns_server:debug,2014-08-19T16:49:14.435,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 392. Nacking mccouch update. [views:debug,2014-08-19T16:49:14.435,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/392. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.435,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",392,active,0} [ns_server:debug,2014-08-19T16:49:14.436,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790, 488,424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866, 802,500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878, 814,448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552, 978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704,640, 576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482,418,716, 652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494,430, 728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506,442, 1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820,454, 1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832,768, 466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908,844, 780,478,414,712,648,584,520,1023,946,882,818,452,1010] [views:debug,2014-08-19T16:49:14.494,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/392. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.494,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",392,active,0} [ns_server:debug,2014-08-19T16:49:14.669,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 390. Nacking mccouch update. [views:debug,2014-08-19T16:49:14.669,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/390. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.669,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",390,active,0} [ns_server:debug,2014-08-19T16:49:14.670,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790, 488,424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866, 802,500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878, 814,448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552, 978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704,640, 576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482,418,716, 652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494,430, 728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506,442, 1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820,454, 390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832, 768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908, 844,780,478,414,712,648,584,520,1023,946,882,818,452,1010] [views:debug,2014-08-19T16:49:14.753,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/390. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",390,active,0} [ns_server:debug,2014-08-19T16:49:14.928,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 388. Nacking mccouch update. [views:debug,2014-08-19T16:49:14.928,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/388. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.928,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",388,active,0} [ns_server:debug,2014-08-19T16:49:14.929,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,1008,748,684,620,556,982,918,854,790, 488,424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866, 802,500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878, 814,448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552, 978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704,640, 576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482,418,716, 652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494,430, 728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506,442, 1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820,454, 390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832, 768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908, 844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010] [views:debug,2014-08-19T16:49:14.988,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/388. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:14.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",388,active,0} [ns_server:debug,2014-08-19T16:49:15.062,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 386. Nacking mccouch update. [views:debug,2014-08-19T16:49:15.062,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/386. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.062,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",386,active,0} [ns_server:debug,2014-08-19T16:49:15.063,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,686,622,558,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400, 1022,762,698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778, 476,412,710,646,582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854, 790,488,424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930, 866,802,500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942, 878,814,448,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954, 890,826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540, 966,902,838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482,418, 716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494, 430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506, 442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820, 454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896, 832,768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972, 908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010] [views:debug,2014-08-19T16:49:15.097,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/386. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.097,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",386,active,0} [ns_server:debug,2014-08-19T16:49:15.188,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 384. Nacking mccouch update. [views:debug,2014-08-19T16:49:15.188,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/384. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.188,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",384,active,0} [ns_server:debug,2014-08-19T16:49:15.189,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,412, 710,646,582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802, 500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814, 448,384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,1004,744,680,616,552, 978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704,640, 576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482,418,716, 652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494,430, 728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506,442, 1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820,454, 390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896,832, 768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972,908, 844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010,686,558] [views:debug,2014-08-19T16:49:15.247,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/384. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.247,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",384,active,0} [ns_server:debug,2014-08-19T16:49:15.339,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 382. Nacking mccouch update. [views:debug,2014-08-19T16:49:15.339,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/382. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.339,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",382,active,0} [ns_server:debug,2014-08-19T16:49:15.340,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,412, 710,646,582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802, 500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814, 448,384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,1002,742,678,614,550,976,912,848,784,482,418, 716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494, 430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506, 442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820, 454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896, 832,768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546,972, 908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010,686,558] [views:debug,2014-08-19T16:49:15.389,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/382. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.390,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",382,active,0} [ns_server:debug,2014-08-19T16:49:15.481,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 380. Nacking mccouch update. [views:debug,2014-08-19T16:49:15.481,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/380. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.481,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",380,active,0} [ns_server:debug,2014-08-19T16:49:15.482,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,412, 710,646,582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802, 500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814, 448,384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796, 494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808, 506,442,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884, 820,454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960, 896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610,546, 972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010,686, 558] [views:debug,2014-08-19T16:49:15.549,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/380. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",380,active,0} [ns_server:debug,2014-08-19T16:49:15.678,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 378. Nacking mccouch update. [views:debug,2014-08-19T16:49:15.678,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/378. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.679,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",378,active,0} [ns_server:debug,2014-08-19T16:49:15.679,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,412, 710,646,582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802, 500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814, 448,384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796, 494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808, 506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534, 960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,738,674,610, 546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010, 686,558] [views:debug,2014-08-19T16:49:15.737,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/378. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.737,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",378,active,0} [ns_server:debug,2014-08-19T16:49:15.821,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 376. Nacking mccouch update. [views:debug,2014-08-19T16:49:15.821,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/376. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.821,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",376,active,0} [ns_server:debug,2014-08-19T16:49:15.822,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,920,856,792,490,426,724,660,596,532,958,894,830,464,400,1022,762, 698,634,570,996,932,868,804,502,438,736,672,608,544,970,906,842,778,476,412, 710,646,582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488, 424,722,658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802, 500,436,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814, 448,384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796, 494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808, 506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534, 960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,376,738,674, 610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388, 1010,686,558] [views:debug,2014-08-19T16:49:15.872,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/376. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:15.872,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",376,active,0} [ns_server:debug,2014-08-19T16:49:16.023,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 374. Nacking mccouch update. [views:debug,2014-08-19T16:49:16.023,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/374. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.023,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",374,active,0} [ns_server:debug,2014-08-19T16:49:16.024,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,724,660,596,532,958,894,830,464,400,1022,762,698,634,570, 996,932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646, 582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448,384, 1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460, 396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902,838, 774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628,564, 990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704,640,576, 512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482,418,716, 652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494,430, 728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506,442, 378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884,820, 454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960,896, 832,768,466,402,764,700,636,572,998,934,870,806,504,440,376,738,674,610,546, 972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010,686, 558,920,792,426] [views:debug,2014-08-19T16:49:16.099,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/374. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.099,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",374,active,0} [ns_server:debug,2014-08-19T16:49:16.248,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 372. Nacking mccouch update. [views:debug,2014-08-19T16:49:16.249,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/372. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.249,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",372,active,0} [ns_server:debug,2014-08-19T16:49:16.249,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,724,660,596,532,958,894,830,464,400,1022,762,698,634,570, 996,932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646, 582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 372,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826, 460,396,1018,758,694,630,566,992,928,864,800,498,434,732,668,604,540,966,902, 838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552, 978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704,640, 576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482,418, 716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494, 430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808,506, 442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948,884, 820,454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534,960, 896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,376,738,674,610, 546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388,1010, 686,558,920,792,426] [views:debug,2014-08-19T16:49:16.300,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/372. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.300,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",372,active,0} [ns_server:debug,2014-08-19T16:49:16.431,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 370. Nacking mccouch update. [views:debug,2014-08-19T16:49:16.432,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/370. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.432,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",370,active,0} [ns_server:debug,2014-08-19T16:49:16.432,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,724,660,596,532,958,894,830,464,400,1022,762,698,634,570, 996,932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646, 582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 372,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826, 460,396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796, 494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808, 506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598,534, 960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,376,738,674, 610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388, 1010,686,558,920,792,426] [views:debug,2014-08-19T16:49:16.466,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/370. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.466,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",370,active,0} [ns_server:debug,2014-08-19T16:49:16.533,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 368. Nacking mccouch update. [views:debug,2014-08-19T16:49:16.534,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/368. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.534,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",368,active,0} [ns_server:debug,2014-08-19T16:49:16.534,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,724,660,596,532,958,894,830,464,400,1022,762,698,634,570, 996,932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646, 582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 372,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826, 460,396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784, 482,418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860, 796,494,430,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872, 808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522, 948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662,598, 534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,376,738, 674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388, 1010,686,558,920,792,426] [views:debug,2014-08-19T16:49:16.567,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/368. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.568,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",368,active,0} [ns_server:debug,2014-08-19T16:49:16.659,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 366. Nacking mccouch update. [views:debug,2014-08-19T16:49:16.659,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/366. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.660,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",366,active,0} [ns_server:debug,2014-08-19T16:49:16.660,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,724,660,596,532,958,894,830,464,400,1022,762,698,634,570, 996,932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646, 582,518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 372,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826, 460,396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784, 482,418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860, 796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936, 872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586, 522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,726,662, 598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,376, 738,674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452, 388,1010,686,558,920,792,426] [views:debug,2014-08-19T16:49:16.718,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/366. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.719,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",366,active,0} [ns_server:debug,2014-08-19T16:49:16.810,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 364. Nacking mccouch update. [views:debug,2014-08-19T16:49:16.810,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/364. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.811,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",364,active,0} [ns_server:debug,2014-08-19T16:49:16.811,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,724,596,958,894,830,464,400,1022,762,698,634,570,996,932, 868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582,518, 944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,722,658,594, 530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734, 670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448,384,1006, 746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628,564, 990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,704,640, 576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482,418, 716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796,494, 430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872,808, 506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522,948, 884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,364,726,662,598, 534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,376,738, 674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452,388, 1010,686,558,920,792,426,660,532] [views:debug,2014-08-19T16:49:16.869,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/364. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.869,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",364,active,0} [ns_server:debug,2014-08-19T16:49:16.936,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 362. Nacking mccouch update. [views:debug,2014-08-19T16:49:16.936,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/362. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.936,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",362,active,0} [ns_server:debug,2014-08-19T16:49:16.937,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,894,830,464,400,1022,762,698,634,570,996, 932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582, 518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,722,658, 594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372, 734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448,384, 1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826,460, 396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902, 838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552, 978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860,796, 494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936,872, 808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586,522, 948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,364,726,662, 598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440,376, 738,674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818,452, 388,1010,686,558,920,792,426,660,532] [views:debug,2014-08-19T16:49:16.984,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/362. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:16.984,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",362,active,0} [ns_server:debug,2014-08-19T16:49:17.141,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 360. Nacking mccouch update. [views:debug,2014-08-19T16:49:17.141,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/360. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.142,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",360,active,0} [ns_server:debug,2014-08-19T16:49:17.142,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,894,830,464,400,1022,762,698,634,570,996, 932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582, 518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 372,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 384,1006,746,682,618,554,980,916,852,788,486,422,720,656,592,528,954,890,826, 460,396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784, 482,418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860, 796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936, 872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586, 522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,364,726, 662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440, 376,738,674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818, 452,388,1010,686,558,920,792,426,660,532] [views:debug,2014-08-19T16:49:17.175,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/360. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.175,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",360,active,0} [ns_server:debug,2014-08-19T16:49:17.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 358. Nacking mccouch update. [views:debug,2014-08-19T16:49:17.342,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/358. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",358,active,0} [ns_server:debug,2014-08-19T16:49:17.343,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,894,830,464,400,1022,762,698,634,570,996, 932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582, 518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 372,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 384,1006,746,682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540, 966,902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680, 616,552,978,914,850,786,484,420,718,654,590,526,952,888,824,458,394,1016,756, 692,628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470, 406,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924, 860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574, 936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650, 586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,364, 726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504, 440,376,738,674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882, 818,452,388,1010,686,558,920,792,426,660,532] [views:debug,2014-08-19T16:49:17.401,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/358. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.402,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",358,active,0} [ns_server:debug,2014-08-19T16:49:17.568,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 356. Nacking mccouch update. [views:debug,2014-08-19T16:49:17.568,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/356. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.568,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",356,active,0} [ns_server:debug,2014-08-19T16:49:17.569,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,894,830,464,400,1022,762,698,634,570,996, 932,868,804,502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582, 518,944,880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722, 658,594,530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436, 372,734,670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448, 384,1006,746,682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890, 826,460,396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540, 966,902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680, 616,552,978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016, 756,692,628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772, 470,406,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912, 848,784,482,418,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638, 574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714, 650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428, 364,726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806, 504,440,376,738,674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946, 882,818,452,388,1010,686,558,920,792,426,660,532] [views:debug,2014-08-19T16:49:17.602,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/356. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.602,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",356,active,0} [ns_server:debug,2014-08-19T16:49:17.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 354. Nacking mccouch update. [views:debug,2014-08-19T16:49:17.677,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/354. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",354,active,0} [ns_server:debug,2014-08-19T16:49:17.678,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,762,698,634,570,996,932,868,804, 502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582,518,944,880, 816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530, 956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670, 606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448,384,1006,746, 682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860, 796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936, 872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,714,650,586, 522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,364,726, 662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504,440, 376,738,674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882,818, 452,388,1010,686,558,920,792,426,660,532,894,400,1022] [views:debug,2014-08-19T16:49:17.736,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/354. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.736,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",354,active,0} [ns_server:debug,2014-08-19T16:49:17.836,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 352. Nacking mccouch update. [views:debug,2014-08-19T16:49:17.837,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/352. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.837,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",352,active,0} [ns_server:debug,2014-08-19T16:49:17.837,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,762,698,634,570,996,932,868,804, 502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582,518,944,880, 816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530, 956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670, 606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448,384,1006,746, 682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860, 796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936, 872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352,714,650, 586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,364, 726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504, 440,376,738,674,610,546,972,908,844,780,478,414,712,648,584,520,1023,946,882, 818,452,388,1010,686,558,920,792,426,660,532,894,400,1022] [views:debug,2014-08-19T16:49:17.871,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/352. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.871,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",352,active,0} [ns_server:debug,2014-08-19T16:49:17.937,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 350. Nacking mccouch update. [views:debug,2014-08-19T16:49:17.937,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/350. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.938,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",350,active,0} [ns_server:debug,2014-08-19T16:49:17.938,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,762,698,634,570,996,932,868,804, 502,438,374,736,672,608,544,970,906,842,778,476,412,710,646,582,518,944,880, 816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530, 956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670, 606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448,384,1006,746, 682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860, 796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574,936, 872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352,714,650, 586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428,364, 726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806,504, 440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584,520,1023,946, 882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022] [views:debug,2014-08-19T16:49:17.971,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/350. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:17.971,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",350,active,0} [ns_server:debug,2014-08-19T16:49:18.038,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 348. Nacking mccouch update. [views:debug,2014-08-19T16:49:18.038,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/348. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.039,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",348,active,0} [ns_server:debug,2014-08-19T16:49:18.039,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,762,698,634,570,996,932,868,804, 502,438,374,736,672,608,544,970,906,842,778,476,412,348,710,646,582,518,944, 880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594, 530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734, 670,606,542,968,904,840,776,474,410,708,644,580,516,942,878,814,448,384,1006, 746,682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460, 396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902, 838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552, 978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784, 482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924, 860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574, 936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352,714, 650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428, 364,726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806, 504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584,520,1023, 946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022] [views:debug,2014-08-19T16:49:18.072,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/348. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.072,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",348,active,0} [ns_server:debug,2014-08-19T16:49:18.156,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 346. Nacking mccouch update. [views:debug,2014-08-19T16:49:18.156,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/346. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.156,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",346,active,0} [ns_server:debug,2014-08-19T16:49:18.157,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,762,698,634,570,996,932,868,804, 502,438,374,736,672,608,544,970,906,842,778,476,412,348,710,646,582,518,944, 880,816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594, 530,956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734, 670,606,542,968,904,840,776,474,410,346,708,644,580,516,942,878,814,448,384, 1006,746,682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826, 460,396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966, 902,838,774,472,408,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756, 692,628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470, 406,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638, 574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352, 714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492, 428,364,726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870, 806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584,520, 1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022] [views:debug,2014-08-19T16:49:18.206,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/346. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",346,active,0} [ns_server:debug,2014-08-19T16:49:18.361,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 344. Nacking mccouch update. [views:debug,2014-08-19T16:49:18.361,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/344. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.361,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",344,active,0} [ns_server:debug,2014-08-19T16:49:18.362,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,698,570,996,932,868,804,502,438, 374,736,672,608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816, 450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956, 892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606, 542,968,904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746, 682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552, 978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784, 482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924, 860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638,574, 936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352,714, 650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428, 364,726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870,806, 504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584,520,1023, 946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022,762,634] [views:debug,2014-08-19T16:49:18.428,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/344. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.429,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",344,active,0} [ns_server:debug,2014-08-19T16:49:18.495,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 342. Nacking mccouch update. [views:debug,2014-08-19T16:49:18.495,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/342. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.495,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",342,active,0} [ns_server:debug,2014-08-19T16:49:18.496,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,698,570,996,932,868,804,502,438, 374,736,672,608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816, 450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956, 892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606, 542,968,904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746, 682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552, 978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,766,702,638, 574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352, 714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492, 428,364,726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934,870, 806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584,520, 1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022,762, 634] [views:debug,2014-08-19T16:49:18.529,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/342. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.529,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",342,active,0} [ns_server:debug,2014-08-19T16:49:18.612,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 340. Nacking mccouch update. [views:debug,2014-08-19T16:49:18.613,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/340. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.613,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",340,active,0} [ns_server:debug,2014-08-19T16:49:18.613,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,698,570,996,932,868,804,502,438, 374,736,672,608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816, 450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956, 892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606, 542,968,904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746, 682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552, 978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702, 638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416, 352,714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794, 492,428,364,726,662,598,534,960,896,832,768,466,402,764,700,636,572,998,934, 870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584, 520,1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022, 762,634] [views:debug,2014-08-19T16:49:18.655,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/340. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.656,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",340,active,0} [ns_server:debug,2014-08-19T16:49:18.722,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 338. Nacking mccouch update. [views:debug,2014-08-19T16:49:18.722,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/338. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.722,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",338,active,0} [ns_server:debug,2014-08-19T16:49:18.723,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,698,570,996,932,868,804,502,438, 374,736,672,608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816, 450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956, 892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606, 542,968,904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746, 682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396, 1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838, 774,472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552, 978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702, 638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416, 352,714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794, 492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572,998, 934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648, 584,520,1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400, 1022,762,634] [views:debug,2014-08-19T16:49:18.756,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/338. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.756,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",338,active,0} [ns_server:debug,2014-08-19T16:49:18.840,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 336. Nacking mccouch update. [views:debug,2014-08-19T16:49:18.840,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/336. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.841,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",336,active,0} [ns_server:debug,2014-08-19T16:49:18.841,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,996,932,868,804,502, 438,374,736,672,608,544,970,906,842,778,476,412,348,710,646,582,518,944,880, 816,450,386,1008,748,684,620,556,982,918,854,790,488,424,360,722,658,594,530, 956,892,828,462,398,1020,760,696,632,568,994,930,866,802,500,436,372,734,670, 606,542,968,904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006, 746,682,618,554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460, 396,1018,758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902, 838,774,472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616, 552,978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756, 692,628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470, 406,342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912, 848,784,482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562, 988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480, 416,352,714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858, 794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572, 998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712, 648,584,520,1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894, 400,1022,762,634] [views:debug,2014-08-19T16:49:18.891,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/336. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:18.891,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",336,active,0} [ns_server:debug,2014-08-19T16:49:19.032,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 334. Nacking mccouch update. [views:debug,2014-08-19T16:49:19.032,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/334. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.032,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",334,active,0} [ns_server:debug,2014-08-19T16:49:19.033,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,736,672, 608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,1008, 748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462, 398,334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968, 904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,1018,758, 694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774,472, 408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978,914, 850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692,628,564, 990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924,860, 796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702,638,574, 936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352,714, 650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492,428, 364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572,998,934,870, 806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584,520, 1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022,762, 634,996,868,502,374] [views:debug,2014-08-19T16:49:19.083,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/334. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.083,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",334,active,0} [ns_server:debug,2014-08-19T16:49:19.241,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 332. Nacking mccouch update. [views:debug,2014-08-19T16:49:19.241,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/332. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.241,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",332,active,0} [ns_server:debug,2014-08-19T16:49:19.242,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,736,672, 608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,1008, 748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462, 398,334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968, 904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018, 758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774, 472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,1016,756,692,628, 564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784, 482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988,924, 860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702,638, 574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352, 714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794,492, 428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572,998,934, 870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648,584, 520,1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400,1022, 762,634,996,868,502,374] [views:debug,2014-08-19T16:49:19.301,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/332. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.302,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",332,active,0} [ns_server:debug,2014-08-19T16:49:19.460,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 330. Nacking mccouch update. [views:debug,2014-08-19T16:49:19.460,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/330. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.460,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",330,active,0} [ns_server:debug,2014-08-19T16:49:19.461,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,736,672, 608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,1008, 748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462, 398,334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968, 904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018, 758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774, 472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702, 638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416, 352,714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858,794, 492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572,998, 934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648, 584,520,1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894,400, 1022,762,634,996,868,502,374] [views:debug,2014-08-19T16:49:19.544,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/330. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.544,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",330,active,0} [ns_server:debug,2014-08-19T16:49:19.711,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 328. Nacking mccouch update. [views:debug,2014-08-19T16:49:19.711,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/328. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.711,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",328,active,0} [ns_server:debug,2014-08-19T16:49:19.712,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,736,672, 608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,1008, 748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462, 398,334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968, 904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018, 758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774, 472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480, 416,352,714,650,586,522,948,884,820,454,390,1012,752,688,624,560,986,922,858, 794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572, 998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712, 648,584,520,1023,946,882,818,452,388,1010,686,558,920,792,426,660,532,894, 400,1022,762,634,996,868,502,374] [views:debug,2014-08-19T16:49:19.770,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/328. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.770,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",328,active,0} [ns_server:debug,2014-08-19T16:49:19.891,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 326. Nacking mccouch update. [views:debug,2014-08-19T16:49:19.891,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/326. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.891,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",326,active,0} [ns_server:debug,2014-08-19T16:49:19.892,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,736,672, 608,544,970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,1008, 748,684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462, 398,334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968, 904,840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018, 758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774, 472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480, 416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922, 858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636, 572,998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350, 712,648,584,520,1023,946,882,818,452,388,1010,686,558,920,792,426,660,532, 894,400,1022,762,634,996,868,502,374] [views:debug,2014-08-19T16:49:19.925,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/326. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.925,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",326,active,0} [ns_server:debug,2014-08-19T16:49:19.991,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 324. Nacking mccouch update. [views:debug,2014-08-19T16:49:19.992,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/324. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:19.992,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",324,active,0} [ns_server:debug,2014-08-19T16:49:19.993,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,672,544, 970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,1008,748,684, 620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398,334, 1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968,904,840, 776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746,682,618,554,980, 916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694, 630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774,472,408, 344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978,914,850, 786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988,924, 860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702,638, 574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416,352, 714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922,858,794, 492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572,998, 934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712,648, 584,520,1023,946,882,818,452,388,324,1010,686,558,920,792,426,660,532,894, 400,1022,762,634,996,868,502,374,736,608] [views:debug,2014-08-19T16:49:20.042,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/324. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.042,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",324,active,0} [ns_server:debug,2014-08-19T16:49:20.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 322. Nacking mccouch update. [views:debug,2014-08-19T16:49:20.192,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/322. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.193,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",322,active,0} [ns_server:debug,2014-08-19T16:49:20.193,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,672,544, 970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,322,1008,748, 684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398, 334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968,904, 840,776,474,410,346,708,644,580,516,942,878,814,448,384,1006,746,682,618,554, 980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758, 694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774,472, 408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978,914, 850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628, 564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848,784, 482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702, 638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480,416, 352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922,858, 794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572, 998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712, 648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792,426,660,532, 894,400,1022,762,634,996,868,502,374,736,608] [views:debug,2014-08-19T16:49:20.243,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/322. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.243,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",322,active,0} [ns_server:debug,2014-08-19T16:49:20.401,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 320. Nacking mccouch update. [views:debug,2014-08-19T16:49:20.402,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/320. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.402,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",320,active,0} [ns_server:debug,2014-08-19T16:49:20.403,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,672,544, 970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,322,1008,748, 684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398, 334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968,904, 840,776,474,410,346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018, 758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774, 472,408,344,706,642,578,514,940,876,812,510,446,382,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782,480, 416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922, 858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636, 572,998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350, 712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792,426,660, 532,894,400,1022,762,634,996,868,502,374,736,608] [views:debug,2014-08-19T16:49:20.486,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/320. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",320,active,0} [ns_server:info,2014-08-19T16:49:20.612,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:49:20.652,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 318. Nacking mccouch update. [views:debug,2014-08-19T16:49:20.652,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/318. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.652,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",318,active,0} [ns_server:debug,2014-08-19T16:49:20.653,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,672,544, 970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,322,1008,748, 684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398, 334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968,904, 840,776,474,410,346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018, 758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774, 472,408,344,706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552, 978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756, 692,628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470, 406,342,704,640,576,512,938,874,810,508,444,380,1002,742,678,614,550,976,912, 848,784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626, 562,988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846,782, 480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986, 922,858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700, 636,572,998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414, 350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792,426, 660,532,894,400,1022,762,634,996,868,502,374,736,608] [views:debug,2014-08-19T16:49:20.712,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/318. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.713,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",318,active,0} [ns_server:debug,2014-08-19T16:49:20.795,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 316. Nacking mccouch update. [views:debug,2014-08-19T16:49:20.795,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/316. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.795,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",316,active,0} [ns_server:debug,2014-08-19T16:49:20.796,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,672,544, 970,906,842,778,476,412,348,710,646,582,518,944,880,816,450,386,322,1008,748, 684,620,556,982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398, 334,1020,760,696,632,568,994,930,866,802,500,436,372,734,670,606,542,968,904, 840,776,474,410,346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618, 554,980,916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018, 758,694,630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774, 472,408,344,706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552, 978,914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756, 692,628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470, 406,342,704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976, 912,848,784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690, 626,562,988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404, 340,766,702,638,574,936,872,808,506,442,378,1000,740,676,612,548,974,910,846, 782,480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764, 700,636,572,998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478, 414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792, 426,660,532,894,400,1022,762,634,996,868,502,374,736,608] [views:debug,2014-08-19T16:49:20.829,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/316. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.829,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",316,active,0} [ns_server:debug,2014-08-19T16:49:20.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 314. Nacking mccouch update. [views:debug,2014-08-19T16:49:20.929,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/314. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.930,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",314,active,0} [ns_server:debug,2014-08-19T16:49:20.930,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,672,544, 906,778,412,710,646,582,518,944,880,816,450,386,322,1008,748,684,620,556,982, 918,854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760,696, 632,568,994,930,866,802,500,436,372,734,670,606,542,968,904,840,776,474,410, 346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852, 788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566, 992,928,864,800,498,434,370,732,668,604,540,966,902,838,774,472,408,344,706, 642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786, 484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990, 926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988,924, 860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702,638, 574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782,480,416, 352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922,858, 794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572, 998,934,870,806,504,440,376,738,674,610,546,972,908,844,780,478,414,350,712, 648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792,426,660,532, 894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348] [views:debug,2014-08-19T16:49:20.988,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/314. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:20.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",314,active,0} [ns_server:debug,2014-08-19T16:49:21.088,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 312. Nacking mccouch update. [views:debug,2014-08-19T16:49:21.089,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/312. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.089,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",312,active,0} [ns_server:debug,2014-08-19T16:49:21.090,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,672,544, 906,778,412,710,646,582,518,944,880,816,450,386,322,1008,748,684,620,556,982, 918,854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760,696, 632,568,994,930,866,802,500,436,372,734,670,606,542,968,904,840,776,474,410, 346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852, 788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566, 992,928,864,800,498,434,370,732,668,604,540,966,902,838,774,472,408,344,706, 642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786, 484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990, 926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988,924, 860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702,638, 574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782,480,416, 352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922,858, 794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636,572, 998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780,478,414,350, 712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792,426,660, 532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348] [views:debug,2014-08-19T16:49:21.148,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/312. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.148,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",312,active,0} [ns_server:debug,2014-08-19T16:49:21.298,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 310. Nacking mccouch update. [views:debug,2014-08-19T16:49:21.298,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/310. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.298,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",310,active,0} [ns_server:debug,2014-08-19T16:49:21.299,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,710,646,582,518,944,880,816,450,386,322,1008,748,684,620,556, 982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760, 696,632,568,994,930,866,802,500,436,372,734,670,606,542,968,904,840,776,474, 410,346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916, 852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630, 566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774,472,408,344, 706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850, 786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988, 924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766,702, 638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782,480, 416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922, 858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700,636, 572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780,478,414, 350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792,426, 660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348] [views:debug,2014-08-19T16:49:21.332,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/310. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.332,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",310,active,0} [ns_server:debug,2014-08-19T16:49:21.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 308. Nacking mccouch update. [views:debug,2014-08-19T16:49:21.461,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/308. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",308,active,0} [ns_server:debug,2014-08-19T16:49:21.462,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,710,646,582,518,944,880,816,450,386,322,1008,748,684,620,556, 982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760, 696,632,568,994,930,866,802,500,436,372,308,734,670,606,542,968,904,840,776, 474,410,346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980, 916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694, 630,566,992,928,864,800,498,434,370,732,668,604,540,966,902,838,774,472,408, 344,706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914, 850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628, 564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782, 480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986, 922,858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700, 636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780,478, 414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792, 426,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348] [views:debug,2014-08-19T16:49:21.546,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/308. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.546,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",308,active,0} [ns_server:debug,2014-08-19T16:49:21.720,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 306. Nacking mccouch update. [views:debug,2014-08-19T16:49:21.720,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/306. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.720,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",306,active,0} [ns_server:debug,2014-08-19T16:49:21.721,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,710,646,582,518,944,880,816,450,386,322,1008,748,684,620,556, 982,918,854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760, 696,632,568,994,930,866,802,500,436,372,308,734,670,606,542,968,904,840,776, 474,410,346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980, 916,852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694, 630,566,992,928,864,800,498,434,370,306,732,668,604,540,966,902,838,774,472, 408,344,706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978, 914,850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692, 628,564,990,926,862,798,496,432,368,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912, 848,784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626, 562,988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764, 700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780, 478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920, 792,426,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348] [views:debug,2014-08-19T16:49:21.779,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/306. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.779,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",306,active,0} [ns_server:debug,2014-08-19T16:49:21.954,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 304. Nacking mccouch update. [views:debug,2014-08-19T16:49:21.954,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/304. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:21.954,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",304,active,0} [ns_server:debug,2014-08-19T16:49:21.955,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,944,880,816,450,386,322,1008,748,684,620,556,982,918, 854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760,696,632, 568,994,930,866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410, 346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852, 788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566, 992,928,864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344, 706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850, 786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782, 480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986, 922,858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764,700, 636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780,478, 414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792, 426,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348,710, 582] [views:debug,2014-08-19T16:49:22.013,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/304. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.014,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",304,active,0} [ns_server:debug,2014-08-19T16:49:22.097,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 302. Nacking mccouch update. [views:debug,2014-08-19T16:49:22.097,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/302. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.097,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",302,active,0} [ns_server:debug,2014-08-19T16:49:22.098,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,944,880,816,450,386,322,1008,748,684,620,556,982,918, 854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760,696,632, 568,994,930,866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410, 346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852, 788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566, 992,928,864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344, 706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850, 786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,726,662,598,534,960,896,832,768,466,402,338,764, 700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780, 478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920, 792,426,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348, 710,582] [views:debug,2014-08-19T16:49:22.131,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/302. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.131,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",302,active,0} [ns_server:debug,2014-08-19T16:49:22.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 300. Nacking mccouch update. [views:debug,2014-08-19T16:49:22.206,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/300. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",300,active,0} [ns_server:debug,2014-08-19T16:49:22.207,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,944,880,816,450,386,322,1008,748,684,620,556,982,918, 854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760,696,632, 568,994,930,866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410, 346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852, 788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566, 992,928,864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344, 706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850, 786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338, 764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844, 780,478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558, 920,792,426,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476, 348,710,582] [views:debug,2014-08-19T16:49:22.283,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/300. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.283,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",300,active,0} [ns_server:debug,2014-08-19T16:49:22.457,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 298. Nacking mccouch update. [views:debug,2014-08-19T16:49:22.457,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/298. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.457,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",298,active,0} [ns_server:debug,2014-08-19T16:49:22.458,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,944,880,816,450,386,322,1008,748,684,620,556,982,918, 854,790,488,424,360,722,658,594,530,956,892,828,462,398,334,1020,760,696,632, 568,994,930,866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410, 346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852, 788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566, 992,928,864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344, 706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850, 786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338, 764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844, 780,478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558, 920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842, 476,348,710,582] [views:debug,2014-08-19T16:49:22.541,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/298. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.541,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",298,active,0} [ns_server:debug,2014-08-19T16:49:22.775,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 296. Nacking mccouch update. [views:debug,2014-08-19T16:49:22.775,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/296. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.775,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",296,active,0} [ns_server:debug,2014-08-19T16:49:22.776,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,944,880,816,450,386,322,1008,748,684,620,556,982,918, 854,790,488,424,360,296,722,658,594,530,956,892,828,462,398,334,1020,760,696, 632,568,994,930,866,802,500,436,372,308,734,670,606,542,968,904,840,776,474, 410,346,708,644,580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916, 852,788,486,422,358,720,656,592,528,954,890,826,460,396,332,1018,758,694,630, 566,992,928,864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408, 344,706,642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914, 850,786,484,420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628, 564,990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406, 342,704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912, 848,784,482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404, 340,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910, 846,782,480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624, 560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402, 338,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908, 844,780,478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686, 558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608,970, 842,476,348,710,582] [views:debug,2014-08-19T16:49:22.851,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/296. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:22.851,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",296,active,0} [ns_server:debug,2014-08-19T16:49:23.009,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 294. Nacking mccouch update. [views:debug,2014-08-19T16:49:23.009,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/294. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.009,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",294,active,0} [ns_server:debug,2014-08-19T16:49:23.010,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,880,386,1008,748,684,620,556,982,918,854,790,488,424, 360,296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930, 866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,706,642, 578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484, 420,356,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988,924, 860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340,766,702, 638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782,480, 416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986,922, 858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338,764,700, 636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780,478, 414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920,792, 426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476,348, 710,582,944,816,450,322] [views:debug,2014-08-19T16:49:23.086,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/294. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.086,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",294,active,0} [ns_server:debug,2014-08-19T16:49:23.253,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 292. Nacking mccouch update. [views:debug,2014-08-19T16:49:23.253,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/292. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.253,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",292,active,0} [ns_server:debug,2014-08-19T16:49:23.254,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,880,386,1008,748,684,620,556,982,918,854,790,488,424, 360,296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930, 866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,706,642, 578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484, 420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990, 926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782, 480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560,986, 922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338,764, 700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844,780, 478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558,920, 792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842,476, 348,710,582,944,816,450,322] [views:debug,2014-08-19T16:49:23.328,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/292. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",292,active,0} [ns_server:debug,2014-08-19T16:49:23.415,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 290. Nacking mccouch update. [views:debug,2014-08-19T16:49:23.415,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/290. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.415,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",290,active,0} [ns_server:debug,2014-08-19T16:49:23.416,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,880,386,1008,748,684,620,556,982,918,854,790,488,424, 360,296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930, 866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,706,642, 578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484, 420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990, 926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338, 764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844, 780,478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686,558, 920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608,970,842, 476,348,710,582,944,816,450,322] [views:debug,2014-08-19T16:49:23.448,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/290. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.449,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",290,active,0} [ns_server:debug,2014-08-19T16:49:23.517,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 288. Nacking mccouch update. [views:debug,2014-08-19T16:49:23.517,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/288. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.517,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",288,active,0} [ns_server:debug,2014-08-19T16:49:23.518,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,880,386,1008,748,684,620,556,982,918,854,790,488,424, 360,296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930, 866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,706,642, 578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484, 420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990, 926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688,624, 560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402, 338,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908, 844,780,478,414,350,712,648,584,520,1023,946,882,818,452,388,324,1010,686, 558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608,970, 842,476,348,710,582,944,816,450,322] [views:debug,2014-08-19T16:49:23.550,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/288. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.551,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",288,active,0} [ns_server:debug,2014-08-19T16:49:23.617,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 286. Nacking mccouch update. [views:debug,2014-08-19T16:49:23.618,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/286. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",286,active,0} [ns_server:debug,2014-08-19T16:49:23.619,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,646,518,880,386,1008,748,684,620,556,982,918,854,790,488,424, 360,296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930, 866,802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,706,642, 578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484, 420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990, 926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688,624, 560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402, 338,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908, 844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324,1010, 686,558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608, 970,842,476,348,710,582,944,816,450,322] [views:debug,2014-08-19T16:49:23.651,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/286. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.651,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",286,active,0} [ns_server:debug,2014-08-19T16:49:23.743,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 284. Nacking mccouch update. [views:debug,2014-08-19T16:49:23.743,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/284. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.744,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",284,active,0} [ns_server:debug,2014-08-19T16:49:23.744,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,918,854,790,488,424,360, 296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930,866, 802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,708,644,580, 516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358, 294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928,864, 800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,706,642,578, 514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420, 356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340,766, 702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782, 480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338, 764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908,844, 780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324,1010,686, 558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608,970, 842,476,348,710,582,944,816,450,322,684,556] [views:debug,2014-08-19T16:49:23.777,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/284. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",284,active,0} [ns_server:debug,2014-08-19T16:49:23.878,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 282. Nacking mccouch update. [views:debug,2014-08-19T16:49:23.878,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/282. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.878,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",282,active,0} [ns_server:debug,2014-08-19T16:49:23.879,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,918,854,790,488,424,360, 296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930,866, 802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,282,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,706,642, 578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484, 420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990, 926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688,624, 560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402, 338,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908, 844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324,1010, 686,558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608, 970,842,476,348,710,582,944,816,450,322,684,556] [views:debug,2014-08-19T16:49:23.962,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/282. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:23.962,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",282,active,0} [ns_server:debug,2014-08-19T16:49:24.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 280. Nacking mccouch update. [views:debug,2014-08-19T16:49:24.112,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/280. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",280,active,0} [ns_server:debug,2014-08-19T16:49:24.113,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,918,854,790,488,424,360, 296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930,866, 802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,282,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706, 642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786, 484,420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404, 340,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910, 846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688, 624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466, 402,338,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972, 908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324, 1010,686,558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374, 736,608,970,842,476,348,710,582,944,816,450,322,684,556] [views:debug,2014-08-19T16:49:24.196,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/280. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.196,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",280,active,0} [ns_server:debug,2014-08-19T16:49:24.346,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 278. Nacking mccouch update. [views:debug,2014-08-19T16:49:24.346,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/278. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.347,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",278,active,0} [ns_server:debug,2014-08-19T16:49:24.347,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,918,854,790,488,424,360, 296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930,866, 802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,282,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706, 642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786, 484,420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342, 278,704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912, 848,784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690, 626,562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468, 404,340,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752, 688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768, 466,402,338,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546, 972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324, 1010,686,558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374, 736,608,970,842,476,348,710,582,944,816,450,322,684,556] [views:debug,2014-08-19T16:49:24.430,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/278. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.431,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",278,active,0} [ns_server:debug,2014-08-19T16:49:24.580,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 276. Nacking mccouch update. [views:debug,2014-08-19T16:49:24.581,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/276. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.581,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",276,active,0} [ns_server:debug,2014-08-19T16:49:24.582,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,918,854,790,488,424,360, 296,722,658,594,530,956,892,828,462,398,334,1020,760,696,632,568,994,930,866, 802,500,436,372,308,734,670,606,542,968,904,840,776,474,410,346,282,708,644, 580,516,942,878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422, 358,294,720,656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928, 864,800,498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706, 642,578,514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786, 484,420,356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564, 990,926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342, 278,704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912, 848,784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690, 626,562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832, 768,466,402,338,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610, 546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388, 324,1010,686,558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502, 374,736,608,970,842,476,348,710,582,944,816,450,322,684,556] [views:debug,2014-08-19T16:49:24.664,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/276. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.665,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",276,active,0} [ns_server:debug,2014-08-19T16:49:24.815,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 274. Nacking mccouch update. [views:debug,2014-08-19T16:49:24.815,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/274. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.815,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",274,active,0} [ns_server:debug,2014-08-19T16:49:24.816,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,658,594, 530,956,892,828,462,398,334,1020,760,696,632,568,994,930,866,802,500,436,372, 308,734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878, 814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720,656, 592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990,926,862,798, 496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482,418, 354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988,924, 860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340,276,766, 702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782, 480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338, 274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908, 844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324,1010, 686,558,920,792,426,298,660,532,894,400,1022,762,634,996,868,502,374,736,608, 970,842,476,348,710,582,944,816,450,322,684,556,918,790,424,296] [views:debug,2014-08-19T16:49:24.899,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/274. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.899,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",274,active,0} [ns_server:debug,2014-08-19T16:49:24.978,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 272. Nacking mccouch update. [views:debug,2014-08-19T16:49:24.978,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/272. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:24.978,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",272,active,0} [ns_server:debug,2014-08-19T16:49:24.979,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,658,594, 530,956,892,828,462,398,334,1020,760,696,632,568,994,930,866,802,500,436,372, 308,734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878, 814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720,656, 592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990,926,862,798, 496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482,418, 354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988,924, 860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340,276,766, 702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846,782, 480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688,624,560, 986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402,338, 274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972,908, 844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324,1010, 686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996,868,502,374,736, 608,970,842,476,348,710,582,944,816,450,322,684,556,918,790,424,296] [views:debug,2014-08-19T16:49:25.012,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/272. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.012,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",272,active,0} [ns_server:debug,2014-08-19T16:49:25.079,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 270. Nacking mccouch update. [views:debug,2014-08-19T16:49:25.079,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/270. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.079,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",270,active,0} [ns_server:debug,2014-08-19T16:49:25.080,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,658,594, 530,956,892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436, 372,308,734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942, 878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720, 656,592,528,954,890,826,460,396,332,1018,758,694,630,566,992,928,864,800,498, 434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514, 940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356, 292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990,926,862, 798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340,276, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688,624, 560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466,402, 338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546,972, 908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324, 1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996,868,502, 374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790,424,296] [views:debug,2014-08-19T16:49:25.113,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/270. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.113,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",270,active,0} [ns_server:debug,2014-08-19T16:49:25.179,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 268. Nacking mccouch update. [views:debug,2014-08-19T16:49:25.180,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/268. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.180,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",268,active,0} [ns_server:debug,2014-08-19T16:49:25.180,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,658,594, 530,956,892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436, 372,308,734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942, 878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720, 656,592,528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800, 498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578, 514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420, 356,292,718,654,590,526,952,888,824,458,394,330,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910, 846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688, 624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466, 402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546, 972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324, 1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996,868,502, 374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790,424,296] [views:debug,2014-08-19T16:49:25.214,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/268. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.214,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",268,active,0} [ns_server:debug,2014-08-19T16:49:25.313,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 266. Nacking mccouch update. [views:debug,2014-08-19T16:49:25.314,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/266. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.314,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",266,active,0} [ns_server:debug,2014-08-19T16:49:25.314,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,658,594, 530,956,892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436, 372,308,734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942, 878,814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720, 656,592,528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800, 498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578, 514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420, 356,292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990, 926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404, 340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752, 688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768, 466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610, 546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388, 324,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996,868, 502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790,424, 296] [views:debug,2014-08-19T16:49:25.348,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/266. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.348,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",266,active,0} [ns_server:debug,2014-08-19T16:49:25.515,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 264. Nacking mccouch update. [views:debug,2014-08-19T16:49:25.515,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/264. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.515,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",264,active,0} [ns_server:debug,2014-08-19T16:49:25.516,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,594,956, 892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436,372,308, 734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814, 448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720,656,592, 528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862, 798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910, 846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,1012,752,688, 624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466, 402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546, 972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324, 1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996,868,502, 374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790,424,296, 658,530] [views:debug,2014-08-19T16:49:25.600,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/264. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.600,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",264,active,0} [ns_server:debug,2014-08-19T16:49:25.757,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 262. Nacking mccouch update. [views:debug,2014-08-19T16:49:25.758,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/262. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.758,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",262,active,0} [ns_server:debug,2014-08-19T16:49:25.759,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,594,956, 892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436,372,308, 734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814, 448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720,656,592, 528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862, 798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910, 846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012,752, 688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768, 466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610, 546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388, 324,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996,868, 502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790,424, 296,658,530] [views:debug,2014-08-19T16:49:25.825,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/262. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.826,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",262,active,0} [cluster:debug,2014-08-19T16:49:25.905,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:handle_call:153]handling add_node("10.242.238.91", 8091, undefined, ..) 
[cluster:debug,2014-08-19T16:49:25.908,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_with_connectivity:505]Posting node info to engage_cluster on {"10.242.238.91",8091}: {struct, [{<<"requestedTargetNodeHostname">>,<<"10.242.238.91">>}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,103212320}, {usagePercent,3}]}, {struct, [{path,<<"/dev/shm">>}, {sizeKBytes,49515824}, {usagePercent,0}]}, {struct, [{path,<<"/boot">>}, {sizeKBytes,198337}, {usagePercent,17}]}, {struct, [{path,<<"/data">>}, {sizeKBytes,329573012}, {usagePercent,1}]}, {struct, [{path,<<"/test">>}, {sizeKBytes,528447160}, {usagePercent,1}]}, {struct, [{path,<<"/var/lib/pgsql">>}, {sizeKBytes,1922866992}, {usagePercent,1}]}]}]}}, {memoryQuota,90112}, {storageTotals, {struct, [{ram, {struct, [{total,101408407552}, {quotaTotal,94489280512}, {quotaUsed,13369344000}, {used,13174808576}, {usedByData,31860280}]}}, {hdd, {struct, [{total,1969015799808}, {quotaTotal,1969015799808}, {used,19690157998}, {usedByData,4146949}, {free,1949325641810}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/var/lib/pgsql">>}, {index_path,<<"/var/lib/pgsql">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct, [{cpu_utilization_rate,0.6268282490597576}, {swap_total,0}, {swap_used,0}, {mem_total,101408407552}, {mem_free,89858928640}]}}, {interestingStats, {struct, [{cmd_get,0.0}, {couch_docs_actual_disk_size,4146949}, {couch_docs_data_size,4136032}, {couch_views_actual_disk_size,0}, {couch_views_data_size,0}, {curr_items,0}, {curr_items_tot,0}, {ep_bg_fetched,0.0}, {get_hits,0.0}, {mem_used,31860280}, {ops,0.0}, {vb_replica_curr_items,0}]}}, {uptime,<<"4108">>}, {memoryTotal,101408407552}, {memoryFree,89858928640}, {mcdMemoryReserved,77368}, {mcdMemoryAllocated,77368}, {couchApiBase,<<"http://10.242.238.88:8092/">>}, {otpCookie,<<"xyzevwdfypcplvpp">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {otpNode,<<"ns_1@10.242.238.88">>}, {thisNode,true}, {hostname,<<"10.242.238.88:8091">>}, {clusterCompatibility,131077}, {version,<<"2.5.1-1083-rel-enterprise">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports, {struct, [{httpsMgmt,18091}, {httpsCAPI,18092}, {sslProxy,11214}, {proxy,11211}, {direct,11210}]}}]} [ns_server:debug,2014-08-19T16:49:25.975,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 260. Nacking mccouch update. [views:debug,2014-08-19T16:49:25.975,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/260. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:25.975,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",260,active,0} [ns_server:debug,2014-08-19T16:49:25.976,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,1008,748,620,982,854,488,360,722,594,956, 892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436,372,308, 734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814, 448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720,656,592, 528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862, 798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910, 846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012,752, 688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768, 466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610, 546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388, 324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996, 868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790, 424,296,658,530] [views:debug,2014-08-19T16:49:26.051,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/260. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.053,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",260,active,0} [cluster:debug,2014-08-19T16:49:26.052,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_with_connectivity:512]Reply from engage_cluster on {"10.242.238.91",8091}: {ok,{struct,[{<<"availableStorage">>, {struct,[{<<"hdd">>, [{struct,[{<<"path">>,<<"/">>}, {<<"sizeKBytes">>,103212320}, {<<"usagePercent">>,3}]}, {struct,[{<<"path">>,<<"/dev/shm">>}, {<<"sizeKBytes">>,49515824}, {<<"usagePercent">>,0}]}, {struct,[{<<"path">>,<<"/boot">>}, {<<"sizeKBytes">>,198337}, {<<"usagePercent">>,17}]}, {struct,[{<<"path">>,<<"/data">>}, {<<"sizeKBytes">>,329573012}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/test">>}, {<<"sizeKBytes">>,528447160}, {<<"usagePercent">>,1}]}, {struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"sizeKBytes">>,1922866992}, {<<"usagePercent">>,1}]}]}]}}, {<<"memoryQuota">>,58026}, {<<"storageTotals">>, {struct,[{<<"ram">>, {struct,[{<<"total">>,101408407552}, {<<"quotaTotal">>,60844670976}, {<<"quotaUsed">>,0}, {<<"used">>,13187182592}, {<<"usedByData">>,0}]}}, {<<"hdd">>, {struct,[{<<"total">>,1969015799808}, {<<"quotaTotal">>,1969015799808}, {<<"used">>,19690157998}, {<<"usedByData">>,0}, {<<"free">>,1949325641810}]}}]}}, {<<"storage">>, {struct,[{<<"ssd">>,[]}, {<<"hdd">>, [{struct,[{<<"path">>,<<"/var/lib/pgsql">>}, {<<"index_path">>,<<"/var/lib/pgsql">>}, {<<"quotaMb">>,<<"none">>}, {<<"state">>,<<"ok">>}]}]}]}}, {<<"systemStats">>, {struct,[{<<"cpu_utilization_rate">>,0.1666666666666667}, {<<"swap_total">>,0}, {<<"swap_used">>,0}, {<<"mem_total">>,101408407552}, {<<"mem_free">>,89875488768}]}}, {<<"interestingStats">>,{struct,[]}}, {<<"uptime">>,<<"3777">>}, {<<"memoryTotal">>,101408407552}, {<<"memoryFree">>,89875488768}, {<<"mcdMemoryReserved">>,77368}, {<<"mcdMemoryAllocated">>,77368}, {<<"couchApiBase">>,<<"http://10.242.238.91:8092/">>}, {<<"otpCookie">>,<<"dcjiebftwfgqkjvr">>}, {<<"clusterMembership">>,<<"active">>}, {<<"status">>,<<"healthy">>}, {<<"otpNode">>,<<"ns_1@10.242.238.91">>}, {<<"thisNode">>,true}, {<<"hostname">>,<<"10.242.238.91:8091">>}, {<<"clusterCompatibility">>,131077}, {<<"version">>,<<"2.5.1-1083-rel-enterprise">>}, {<<"os">>,<<"x86_64-unknown-linux-gnu">>}, {<<"ports">>, {struct,[{<<"httpsMgmt">>,18091}, {<<"httpsCAPI">>,18092}, {<<"sslProxy">>,11214}, {<<"proxy">>,11211}, {<<"direct">>,11210}]}}]}} [cluster:debug,2014-08-19T16:49:26.054,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:verify_otp_connectivity:578]port_please("ns_1", "10.242.238.91") = 21101 [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:44]ns_node_disco_conf_events config on nodes_wanted [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: server_groups -> [[{uuid,<<"0">>}, {name,<<"Group 1">>}, {nodes,['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90', 'ns_1@10.242.238.91']}]] [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:mb_master<0.20995.0>:mb_master:update_peers:506]List of peers has changed from ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'] to ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 
'ns_1@10.242.238.90', 'ns_1@10.242.238.91'] [cluster:info,2014-08-19T16:49:26.056,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:node_add_transaction_finish:727]Started node add transaction by adding node 'ns_1@10.242.238.91' to nodes_wanted (group: undefined) [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',membership} -> inactiveAdded [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: nodes_wanted -> ['ns_1@10.242.238.88','ns_1@10.242.238.89','ns_1@10.242.238.90', 'ns_1@10.242.238.91'] [ns_server:debug,2014-08-19T16:49:26.058,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.056,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([nodes_wanted,server_groups, {node,'ns_1@10.242.238.91',membership}]..) [ns_server:debug,2014-08-19T16:49:26.058,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:26.058,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [cluster:debug,2014-08-19T16:49:26.060,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_engaged_inner:649]Posting the following to complete_join on "10.242.238.91:8091": {struct, [{<<"targetNode">>,'ns_1@10.242.238.91'}, {availableStorage, {struct, [{hdd, [{struct, [{path,<<"/">>}, {sizeKBytes,103212320}, {usagePercent,3}]}, {struct, [{path,<<"/dev/shm">>}, {sizeKBytes,49515824}, {usagePercent,0}]}, {struct, [{path,<<"/boot">>}, {sizeKBytes,198337}, {usagePercent,17}]}, {struct, [{path,<<"/data">>}, {sizeKBytes,329573012}, {usagePercent,1}]}, {struct, [{path,<<"/test">>}, {sizeKBytes,528447160}, {usagePercent,1}]}, {struct, [{path,<<"/var/lib/pgsql">>}, {sizeKBytes,1922866992}, {usagePercent,1}]}]}]}}, {memoryQuota,90112}, {storageTotals, {struct, [{ram, {struct, [{total,101408407552}, {quotaTotal,94489280512}, {quotaUsed,13369344000}, {used,13174808576}, {usedByData,31860280}]}}, {hdd, {struct, [{total,1969015799808}, {quotaTotal,1969015799808}, {used,19690157998}, {usedByData,4146949}, {free,1949325641810}]}}]}}, {storage, {struct, [{ssd,[]}, {hdd, [{struct, [{path,<<"/var/lib/pgsql">>}, {index_path,<<"/var/lib/pgsql">>}, {quotaMb,none}, {state,ok}]}]}]}}, {systemStats, {struct, [{cpu_utilization_rate,0.6268282490597576}, {swap_total,0}, {swap_used,0}, {mem_total,101408407552}, {mem_free,89858928640}]}}, {interestingStats, {struct, [{cmd_get,0.0}, {couch_docs_actual_disk_size,4146949}, {couch_docs_data_size,4136032}, {couch_views_actual_disk_size,0}, {couch_views_data_size,0}, {curr_items,0}, {curr_items_tot,0}, {ep_bg_fetched,0.0}, {get_hits,0.0}, {mem_used,31860280}, {ops,0.0}, {vb_replica_curr_items,0}]}}, {uptime,<<"4108">>}, {memoryTotal,101408407552}, {memoryFree,89858928640}, {mcdMemoryReserved,77368}, 
{mcdMemoryAllocated,77368}, {couchApiBase,<<"http://10.242.238.88:8092/">>}, {otpCookie,<<"xyzevwdfypcplvpp">>}, {clusterMembership,<<"active">>}, {status,<<"healthy">>}, {otpNode,<<"ns_1@10.242.238.88">>}, {thisNode,true}, {hostname,<<"10.242.238.88:8091">>}, {clusterCompatibility,131077}, {version,<<"2.5.1-1083-rel-enterprise">>}, {os,<<"x86_64-unknown-linux-gnu">>}, {ports, {struct, [{httpsMgmt,18091}, {httpsCAPI,18092}, {sslProxy,11214}, {proxy,11211}, {direct,11210}]}}]} [ns_server:debug,2014-08-19T16:49:26.110,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:26.110,ns_1@10.242.238.88:<0.25011.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90', 'ns_1@10.242.238.91'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.115,ns_1@10.242.238.88:<0.25011.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.226,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 258. Nacking mccouch update. [views:debug,2014-08-19T16:49:26.226,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/258. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.226,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",258,active,0} [ns_server:debug,2014-08-19T16:49:26.227,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,982,854,488,360,722,594, 956,892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436,372, 308,734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878, 814,448,384,320,1006,746,682,618,554,980,916,852,788,486,422,358,294,720,656, 592,528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498, 434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514, 940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356, 292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404, 340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832, 768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674, 610,546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452, 388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634, 996,868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918, 790,424,296,658,530] 
[views:debug,2014-08-19T16:49:26.302,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/258. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.302,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",258,active,0} [ns_server:debug,2014-08-19T16:49:26.445,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:info,2014-08-19T16:49:26.448,ns_1@10.242.238.88:<0.25056.0>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:49:26.448,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [user:info,2014-08-19T16:49:26.448,ns_1@10.242.238.88:ns_node_disco<0.17920.0>:ns_node_disco:handle_info:159]Node 'ns_1@10.242.238.88' saw that node 'ns_1@10.242.238.91' came up. Tags: [] [ns_server:debug,2014-08-19T16:49:26.448,ns_1@10.242.238.88:<0.19195.0>:capi_set_view_manager:nodeup_monitoring_loop:176]got nodeup event. Considering ddocs replication [ns_server:debug,2014-08-19T16:49:26.448,ns_1@10.242.238.88:<0.18065.0>:xdc_rdoc_replication_srv:nodeup_monitoring_loop:46]got nodeup event. Considering rdocs replication [ns_server:debug,2014-08-19T16:49:26.448,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.448,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.448,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:49:26.448,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_rep_events:handle_event:42]Detected a new nodes (['ns_1@10.242.238.91']). Moving config around. [ns_server:info,2014-08-19T16:49:26.449,ns_1@10.242.238.88:ns_node_disco_events<0.17919.0>:ns_node_disco_log:handle_event:46]ns_node_disco_log: nodes changed: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91'] [ns_server:warn,2014-08-19T16:49:26.449,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:150]Remote server node {xdc_rdoc_replication_srv,'ns_1@10.242.238.91'} process down: noproc [ns_server:debug,2014-08-19T16:49:26.477,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 256. Nacking mccouch update. [views:debug,2014-08-19T16:49:26.477,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/256. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.477,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",256,active,0} [ns_server:debug,2014-08-19T16:49:26.478,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,982,854,488,360,722,594, 956,892,828,462,398,334,270,1020,760,696,632,568,994,930,866,802,500,436,372, 308,734,670,606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878, 814,448,384,320,256,1006,746,682,618,554,980,916,852,788,486,422,358,294,720, 656,592,528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800, 498,434,370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578, 514,940,876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420, 356,292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990, 926,862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848, 784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690, 626,562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262, 1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896, 832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738, 674,610,546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818, 452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762, 634,996,868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556, 918,790,424,296,658,530] [ns_server:debug,2014-08-19T16:49:26.529,ns_1@10.242.238.88:ns_config_events<0.17896.0>:ns_node_disco_conf_events:handle_event:50]ns_node_disco_conf_events config on otp [ns_server:debug,2014-08-19T16:49:26.529,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.529,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: otp -> [{cookie,xyzevwdfypcplvpp}] [ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',capi_port} -> 8092 [ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',compaction_daemon} -> [{check_interval,30},{min_file_size,131072}] [ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',config_version} -> {2,3,0} [ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_sync:110]ns_cookie_manager do_cookie_sync [ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',isasl} -> [{path,"/opt/couchbase/var/lib/couchbase/isasl.pw"}] [ns_server:debug,2014-08-19T16:49:26.530,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:147]saving cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server" [ns_server:debug,2014-08-19T16:49:26.531,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',memcached} -> [{mccouch_port,11213}, {engines, [{membase, [{engine,"/opt/couchbase/lib/memcached/ep.so"}, {static_config_string, "vb0=false;waitforwarmup=false;failpartialwarmup=false"}]}, {memcached, [{engine,"/opt/couchbase/lib/memcached/default_engine.so"}, {static_config_string,"vb0=true"}]}]}, {log_path,"/opt/couchbase/var/lib/couchbase/logs"}, {log_prefix,"memcached.log"}, {log_generations,20}, {log_cyclesize,10485760}, {log_sleeptime,19}, {log_rotation_period,39003}, {dedicated_port,11209}, {bucket_engine,"/opt/couchbase/lib/memcached/bucket_engine.so"}, {port,11210}, {dedicated_port,11209}, {admin_user,"_admin"}, {admin_pass,"*****"}, {verbosity,[]}] [ns_server:debug,2014-08-19T16:49:26.531,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.531,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',moxi} -> [{port,11211},{verbosity,[]}] [ns_server:debug,2014-08-19T16:49:26.531,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ns_log} -> [{filename,"/opt/couchbase/var/lib/couchbase/ns_log"}] [ns_server:debug,2014-08-19T16:49:26.531,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.531,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.531,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',port_servers} -> [{moxi,"/opt/couchbase/bin/moxi", ["-Z", {"port_listen=~B,default_bucket_name=default,downstream_max=1024,downstream_conn_max=4,connect_max_errors=5,connect_retry_interval=30000,connect_timeout=400,auth_timeout=100,cycle=200,downstream_conn_queue_timeout=200,downstream_timeout=5000,wait_queue_timeout=200", [port]}, "-z", {"url=http://127.0.0.1:~B/pools/default/saslBucketsStreaming", [{misc,this_node_rest_port,[]}]}, "-p","0","-Y","y","-O","stderr", {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MOXI_SASL_PLAIN_USR",{"~s",[{ns_moxi_sup,rest_user,[]}]}}, {"MOXI_SASL_PLAIN_PWD",{"~s",[{ns_moxi_sup,rest_pass,[]}]}}]}, use_stdio,exit_status,port_server_send_eol,stderr_to_stdout,stream]}, {memcached,"/opt/couchbase/bin/memcached", ["-X","/opt/couchbase/lib/memcached/stdin_term_handler.so","-X", 
{"/opt/couchbase/lib/memcached/file_logger.so,cyclesize=~B;sleeptime=~B;filename=~s/~s", [log_cyclesize,log_sleeptime,log_path,log_prefix]}, "-l", {"0.0.0.0:~B,0.0.0.0:~B:1000",[port,dedicated_port]}, "-p", {"~B",[port]}, "-E","/opt/couchbase/lib/memcached/bucket_engine.so","-B", "binary","-r","-c","10000","-e", {"admin=~s;default_bucket_name=default;auto_create=false", [admin_user]}, {"~s",[verbosity]}], [{env,[{"EVENT_NOSELECT","1"}, {"MEMCACHED_TOP_KEYS","100"}, {"ISASL_PWFILE",{"~s",[{isasl,path}]}}]}, use_stdio,stderr_to_stdout,exit_status,port_server_send_eol, stream]}] [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',rest} -> [{port,8091},{port_meta,global}] [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_capi_port} -> 18092 [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_proxy_downstream_port} -> 11214 [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_proxy_upstream_port} -> 11215 [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',ssl_rest_port} -> 18091 [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.533,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.536,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:26.536,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:49:26.540,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/256. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.540,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",256,active,0} [ns_server:debug,2014-08-19T16:49:26.571,ns_1@10.242.238.88:ns_cookie_manager<0.17893.0>:ns_cookie_manager:do_cookie_save:149]attempted to save cookie to "/opt/couchbase/var/lib/couchbase/couchbase-server.cookie-ns-server": ok [ns_server:debug,2014-08-19T16:49:26.571,ns_1@10.242.238.88:<0.25065.0>:ns_node_disco:do_nodes_wanted_updated_fun:199]ns_node_disco: nodes_wanted updated: ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90', 'ns_1@10.242.238.91'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.588,ns_1@10.242.238.88:<0.25065.0>:ns_node_disco:do_nodes_wanted_updated_fun:205]ns_node_disco: nodes_wanted pong: ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91'], with cookie: xyzevwdfypcplvpp [ns_server:debug,2014-08-19T16:49:26.623,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 254. Nacking mccouch update. [views:debug,2014-08-19T16:49:26.623,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/254. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.623,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",254,active,0} [ns_server:debug,2014-08-19T16:49:26.624,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,760,696,632,568,994,930,866,802,500,436,372,308,734,670, 606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384, 320,256,1006,746,682,618,554,980,916,852,788,486,422,358,294,720,656,592,528, 954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370, 306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940,876, 812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356,292,718, 654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798, 496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482,418, 354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340,276, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910,846, 782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012,752,688, 624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768,466, 402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610,546, 972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388,324, 260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996,868, 502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790,424, 296,658,530,892,398,270,1020] [ns_server:debug,2014-08-19T16:49:26.637,ns_1@10.242.238.88:xdc_rdoc_replication_srv<0.18064.0>:xdc_rdoc_replication_srv:handle_info:154]doing replicate_newnodes_docs 
[cluster:debug,2014-08-19T16:49:26.639,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:do_add_node_engaged_inner:656]Reply from complete_join on "10.242.238.91:8091": {ok,[]} [cluster:debug,2014-08-19T16:49:26.639,ns_1@10.242.238.88:ns_cluster<0.17894.0>:ns_cluster:handle_call:155]add_node("10.242.238.91", 8091, undefined, ..) -> {ok,'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:49:26.657,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/254. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.657,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",254,active,0} [ns_server:debug,2014-08-19T16:49:26.732,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 252. Nacking mccouch update. [views:debug,2014-08-19T16:49:26.732,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/252. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.732,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",252,active,0} [ns_server:debug,2014-08-19T16:49:26.733,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,760,696,632,568,994,930,866,802,500,436,372,308,734,670, 606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384, 320,256,1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592, 528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862, 798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974,910, 846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012,752, 688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832,768, 466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674,610, 546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452,388, 324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634,996, 868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918,790, 424,296,658,530,892,398,270,1020] [views:debug,2014-08-19T16:49:26.766,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/252. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.766,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",252,active,0} [ns_server:debug,2014-08-19T16:49:26.843,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 250. Nacking mccouch update. [views:debug,2014-08-19T16:49:26.843,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/250. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.843,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",250,active,0} [ns_server:debug,2014-08-19T16:49:26.844,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,760,696,632,568,994,930,866,802,500,436,372,308,734,670, 606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384, 320,256,1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592, 528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356, 292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404, 340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832, 768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674, 610,546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818,452, 388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762,634, 996,868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556,918, 790,424,296,658,530,892,398,270,1020] [views:debug,2014-08-19T16:49:26.877,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/250. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.877,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",250,active,0} [ns_server:debug,2014-08-19T16:49:26.944,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 248. Nacking mccouch update. [views:debug,2014-08-19T16:49:26.944,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/248. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.944,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",248,active,0} [ns_server:debug,2014-08-19T16:49:26.945,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,760,696,632,568,994,930,866,802,500,436,372,308,734,670, 606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384, 320,256,1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592, 528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356, 292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848, 784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690, 626,562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262, 1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896, 832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738, 674,610,546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818, 452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762, 634,996,868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556, 918,790,424,296,658,530,892,398,270,1020] [views:debug,2014-08-19T16:49:26.978,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/248. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:26.978,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",248,active,0} [ns_server:debug,2014-08-19T16:49:27.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 246. Nacking mccouch update. [views:debug,2014-08-19T16:49:27.120,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/246. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",246,active,0} [ns_server:debug,2014-08-19T16:49:27.121,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,760,696,632,568,994,930,866,802,500,436,372,308,734,670, 606,542,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384, 320,256,1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592, 528,954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434, 370,306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356, 292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848, 784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690, 626,562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326, 262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960, 896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312, 738,674,610,546,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882, 818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022, 762,634,996,868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684, 556,918,790,424,296,658,530,892,398,270,1020] [views:debug,2014-08-19T16:49:27.196,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/246. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.196,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",246,active,0} [ns_server:debug,2014-08-19T16:49:27.371,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 244. Nacking mccouch update. [views:debug,2014-08-19T16:49:27.371,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/244. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.371,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",244,active,0} [ns_server:debug,2014-08-19T16:49:27.372,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,696,568,994,930,866,802,500,436,372,308,734,670,606,542, 968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320,256, 1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528,954, 890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306, 732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812, 510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718, 654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798, 496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832, 768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674, 610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818, 452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762, 634,996,868,502,374,736,608,970,842,476,348,710,582,944,816,450,322,684,556, 918,790,424,296,658,530,892,398,270,1020,760,632] [views:debug,2014-08-19T16:49:27.455,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/244. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.456,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",244,active,0} [ns_server:debug,2014-08-19T16:49:27.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 242. Nacking mccouch update. [views:debug,2014-08-19T16:49:27.630,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/242. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",242,active,0} [ns_server:debug,2014-08-19T16:49:27.631,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,696,568,994,930,866,802,500,436,372,308,734,670,606,542, 968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320,256, 1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528,954, 890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306, 732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812, 510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718, 654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798, 496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832, 768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674, 610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818, 452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762, 634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450,322,684, 556,918,790,424,296,658,530,892,398,270,1020,760,632] [views:debug,2014-08-19T16:49:27.714,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/242. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.714,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",242,active,0} [ns_server:debug,2014-08-19T16:49:27.872,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 240. Nacking mccouch update. [views:debug,2014-08-19T16:49:27.873,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/240. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.873,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",240,active,0} [ns_server:debug,2014-08-19T16:49:27.874,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,696,568,994,930,866,802,500,436,372,308,734,670,606,542, 240,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320, 256,1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528, 954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370, 306,732,668,604,540,966,902,838,774,472,408,344,280,706,642,578,514,940,876, 812,510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862, 798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468,404, 340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246, 974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262, 1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896, 832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738, 674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882, 818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022, 762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450,322, 684,556,918,790,424,296,658,530,892,398,270,1020,760,632] [views:debug,2014-08-19T16:49:27.957,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/240. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:27.957,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",240,active,0} [ns_server:debug,2014-08-19T16:49:28.127,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 238. Nacking mccouch update. [views:debug,2014-08-19T16:49:28.127,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/238. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.127,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",238,active,0} [ns_server:debug,2014-08-19T16:49:28.128,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,696,568,994,930,866,802,500,436,372,308,734,670,606,542, 240,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320, 256,1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528, 954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370, 306,732,668,604,540,238,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356, 292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,964,900,836,772,470,406,342,278,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848, 784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690, 626,562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326, 262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960, 896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312, 738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946, 882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450, 322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632] [views:debug,2014-08-19T16:49:28.169,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/238. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.169,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",238,active,0} [ns_server:debug,2014-08-19T16:49:28.244,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 236. Nacking mccouch update. [views:debug,2014-08-19T16:49:28.245,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/236. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",236,active,0} [ns_server:debug,2014-08-19T16:49:28.246,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,696,568,994,930,866,802,500,436,372,308,734,670,606,542, 240,968,904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320, 256,1006,746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528, 954,890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370, 306,732,668,604,540,238,966,902,838,774,472,408,344,280,706,642,578,514,940, 876,812,510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356, 292,718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926, 862,798,496,432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278, 704,640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912, 848,784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754, 690,626,562,988,924,860,796,494,430,366,302,728,664,600,536,962,898,834,770, 468,404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612, 548,246,974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376, 312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023, 946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450, 322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632] [views:debug,2014-08-19T16:49:28.279,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/236. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.279,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",236,active,0} [ns_server:debug,2014-08-19T16:49:28.354,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 234. Nacking mccouch update. [views:debug,2014-08-19T16:49:28.354,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/234. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.354,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",234,active,0} [ns_server:debug,2014-08-19T16:49:28.355,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,696,568,930,802,436,308,734,670,606,542,240,968,904,840, 776,474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746,682, 618,554,252,980,916,852,788,486,422,358,294,720,656,592,528,954,890,826,460, 396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604, 540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446, 382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590, 526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432, 368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512, 938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418, 354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,960,896,832, 768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738,674, 610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882,818, 452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022,762, 634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450,322,684, 556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500,372] [views:debug,2014-08-19T16:49:28.388,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/234. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",234,active,0} [ns_server:debug,2014-08-19T16:49:28.463,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 232. Nacking mccouch update. [views:debug,2014-08-19T16:49:28.463,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/232. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.463,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",232,active,0} [ns_server:debug,2014-08-19T16:49:28.464,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,958,830,464,336,698,570,932,804,438,310,672, 544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360,722, 594,956,828,462,334,696,568,930,802,436,308,734,670,606,542,240,968,904,840, 776,474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746,682, 618,554,252,980,916,852,788,486,422,358,294,720,656,592,528,954,890,826,460, 396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604, 540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446, 382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590, 526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432, 368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512, 938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418, 354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232,960,896, 832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738, 674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946,882, 818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272,1022, 762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450,322, 684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500,372] [views:debug,2014-08-19T16:49:28.497,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/232. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.497,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",232,active,0} [ns_server:debug,2014-08-19T16:49:28.564,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 230. Nacking mccouch update. [views:debug,2014-08-19T16:49:28.564,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/230. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.564,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",230,active,0} [ns_server:debug,2014-08-19T16:49:28.565,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,956,828,462,334,696,568,930,802,436,308,734,670,606,542,240,968,904, 840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746, 682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528,954,890,826, 460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668, 604,540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510, 446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654, 590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496, 432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246, 974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262, 1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232,960, 896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312, 738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946, 882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450, 322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500,372] [views:debug,2014-08-19T16:49:28.598,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/230. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.598,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",230,active,0} [ns_server:debug,2014-08-19T16:49:28.765,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 228. Nacking mccouch update. [views:debug,2014-08-19T16:49:28.765,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/228. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.765,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",228,active,0} [ns_server:debug,2014-08-19T16:49:28.766,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,734,670,606,542,240,968, 904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006, 746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528,954,890, 826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732, 668,604,540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812, 510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718, 654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798, 496,432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640, 576,512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784, 482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326, 262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376, 312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023, 946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450, 322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500,372] [views:debug,2014-08-19T16:49:28.849,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/228. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:28.849,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",228,active,0} [ns_server:debug,2014-08-19T16:49:29.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 226. Nacking mccouch update. [views:debug,2014-08-19T16:49:29.033,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/226. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",226,active,0} [ns_server:debug,2014-08-19T16:49:29.034,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,734,670,606,542,240,968, 904,840,776,474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006, 746,682,618,554,252,980,916,852,788,486,422,358,294,720,656,592,528,226,954, 890,826,460,396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306, 732,668,604,540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876, 812,510,446,382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292, 718,654,590,526,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862, 798,496,432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704, 640,576,512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848, 784,482,418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690, 626,562,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770, 468,404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612, 548,246,974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894, 400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944, 816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866, 500,372] [views:debug,2014-08-19T16:49:29.117,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/226. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.117,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",226,active,0} [ns_server:debug,2014-08-19T16:49:29.292,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 224. Nacking mccouch update. [views:debug,2014-08-19T16:49:29.292,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/224. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.292,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",224,active,0} [ns_server:debug,2014-08-19T16:49:29.293,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,968,904,840,776, 474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746,682,618, 554,252,980,916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460, 396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604, 540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446, 382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590, 526,224,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496, 432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246, 974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326,262, 1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232,960, 896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312, 738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023,946, 882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450, 322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500,372, 734,606,240] [views:debug,2014-08-19T16:49:29.376,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/224. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.376,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",224,active,0} [ns_server:debug,2014-08-19T16:49:29.560,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 222. Nacking mccouch update. [views:debug,2014-08-19T16:49:29.560,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/222. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.560,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",222,active,0} [ns_server:debug,2014-08-19T16:49:29.561,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,968,904,840,776, 474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746,682,618, 554,252,980,916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460, 396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604, 540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446, 382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590, 526,224,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496, 432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,948,884,820,454,390,326, 262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376, 312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,1023, 946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944,816,450, 322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500,372, 734,606,240] [views:debug,2014-08-19T16:49:29.643,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/222. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.643,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",222,active,0} [ns_server:debug,2014-08-19T16:49:29.748,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 220. Nacking mccouch update. [views:debug,2014-08-19T16:49:29.748,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/220. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.748,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",220,active,0} [ns_server:debug,2014-08-19T16:49:29.749,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,968,904,840,776, 474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746,682,618, 554,252,980,916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460, 396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604, 540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446, 382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590, 526,224,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496, 432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894, 400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,944, 816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866, 500,372,734,606,240] [views:debug,2014-08-19T16:49:29.781,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/220. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.781,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",220,active,0} [ns_server:debug,2014-08-19T16:49:29.857,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 218. Nacking mccouch update. [views:debug,2014-08-19T16:49:29.857,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/218. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.857,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",218,active,0} [ns_server:debug,2014-08-19T16:49:29.858,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,968,904,840,776, 474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746,682,618, 554,252,980,916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460, 396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604, 540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446, 382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590, 526,224,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496, 432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532, 894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582, 944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994, 866,500,372,734,606,240] [views:debug,2014-08-19T16:49:29.907,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/218. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:29.907,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",218,active,0} [ns_server:debug,2014-08-19T16:49:30.049,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 216. Nacking mccouch update. [views:debug,2014-08-19T16:49:30.049,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/216. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.049,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",216,active,0} [ns_server:debug,2014-08-19T16:49:30.050,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,968,904,840,776, 474,410,346,282,708,644,580,516,942,878,814,448,384,320,256,1006,746,682,618, 554,252,980,916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460, 396,332,268,1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604, 540,238,966,902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446, 382,318,1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590, 526,224,952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496, 432,368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576, 512,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468, 404,340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532, 894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582, 216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632, 994,866,500,372,734,606,240] [views:debug,2014-08-19T16:49:30.117,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/216. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.117,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",216,active,0} [ns_server:debug,2014-08-19T16:49:30.266,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 214. Nacking mccouch update. [views:debug,2014-08-19T16:49:30.266,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/214. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",214,active,0} [ns_server:debug,2014-08-19T16:49:30.267,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,904,776,410,282, 708,644,580,516,214,942,878,814,448,384,320,256,1006,746,682,618,554,252,980, 916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268, 1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,940,876,812,510,446,382,318,1004, 744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224,952, 888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368,304, 730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,938,874, 810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354,290, 716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246,974,910, 846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262,1012, 752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232,960,896, 832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312,738, 674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,218,1023,946, 882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,216,944,816, 450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500, 372,734,606,240,968,840,474,346] [views:debug,2014-08-19T16:49:30.335,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/214. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.335,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",214,active,0} [ns_server:debug,2014-08-19T16:49:30.493,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 212. Nacking mccouch update. [views:debug,2014-08-19T16:49:30.494,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/212. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.494,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",212,active,0} [ns_server:debug,2014-08-19T16:49:30.495,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,904,776,410,282, 708,644,580,516,214,942,878,814,448,384,320,256,1006,746,682,618,554,252,980, 916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268, 1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318, 1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224, 952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368, 304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,938, 874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354, 290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340, 276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232,960, 896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376,312, 738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,218,1023, 946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894,400,272, 1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,216,944,816, 450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994,866,500, 372,734,606,240,968,840,474,346] [views:debug,2014-08-19T16:49:30.561,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/212. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.561,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",212,active,0} [ns_server:debug,2014-08-19T16:49:30.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 210. Nacking mccouch update. [views:debug,2014-08-19T16:49:30.661,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/210. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",210,active,0} [ns_server:debug,2014-08-19T16:49:30.662,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,904,776,410,282, 708,644,580,516,214,942,878,814,448,384,320,256,1006,746,682,618,554,252,980, 916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268, 1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318, 1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224, 952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368, 304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210, 938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418, 354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,936,872,808,506,442,378,314,1000,740,676,612,548,246, 974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326, 262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440,376, 312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,218, 1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894, 400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,216, 944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994, 866,500,372,734,606,240,968,840,474,346] [views:debug,2014-08-19T16:49:30.721,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/210. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.721,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",210,active,0} [ns_server:debug,2014-08-19T16:49:30.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 208. Nacking mccouch update. [views:debug,2014-08-19T16:49:30.813,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/208. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",208,active,0} [ns_server:debug,2014-08-19T16:49:30.814,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,904,776,410,282, 708,644,580,516,214,942,878,814,448,384,320,256,1006,746,682,618,554,252,980, 916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268, 1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318, 1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224, 952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368, 304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210, 938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418, 354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532, 894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582, 216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632, 994,866,500,372,734,606,240,968,840,474,346] [views:debug,2014-08-19T16:49:30.864,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/208. 
Updated state: active (0)
[ns_server:debug,2014-08-19T16:49:30.864,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",208,active,0}
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.91',membership} -> active
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.90',membership} -> active
[user:info,2014-08-19T16:49:30.913,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:idle:623]Starting rebalance, KeepNodes = ['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91'], EjectNodes = []
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.89',membership} -> active
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([{node,'ns_1@10.242.238.88',membership}, {node,'ns_1@10.242.238.89',membership}, {node,'ns_1@10.242.238.90',membership}, {node,'ns_1@10.242.238.91',membership}]..)
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.913,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: {node,'ns_1@10.242.238.88',membership} -> active
[ns_server:debug,2014-08-19T16:49:30.914,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: counters -> [{rebalance_start,1}]
[ns_server:debug,2014-08-19T16:49:30.914,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: rebalancer_pid -> <0.25442.0>
[ns_server:debug,2014-08-19T16:49:30.914,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: rebalance_status -> running
[ns_server:debug,2014-08-19T16:49:30.916,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.916,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.916,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:49:30.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:30.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:30.918,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([counters,rebalance_status,rebalancer_pid]..) [ns_server:debug,2014-08-19T16:49:30.919,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:handle_call:113]Got full synchronization request from 'ns_1@10.242.238.88' [ns_server:debug,2014-08-19T16:49:30.919,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:handle_call:119]Fully synchronized config in 13 us [ns_server:debug,2014-08-19T16:49:30.939,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 206. Nacking mccouch update. [views:debug,2014-08-19T16:49:30.939,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/206. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.939,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",206,active,0} [ns_server:debug,2014-08-19T16:49:30.940,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,932,804,438,310, 672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488,360, 722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,904,776,410,282, 708,644,580,516,214,942,878,814,448,384,320,256,1006,746,682,618,554,252,980, 916,852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268, 1018,758,694,630,566,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318, 1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224, 952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368, 304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210, 938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418, 354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584, 520,218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660, 532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710, 582,216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760, 632,994,866,500,372,734,606,240,968,840,474,346] [rebalance:debug,2014-08-19T16:49:30.931,ns_1@10.242.238.88:<0.25442.0>:ns_rebalancer:rebalance:274]BucketConfigs = [{"default", [{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, 
{num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88']}, {map,[['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined]]}, {map_opts_hash,133465355}]}] [views:debug,2014-08-19T16:49:30.989,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/206. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:30.990,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",206,active,0} [ns_server:debug,2014-08-19T16:49:31.156,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 204. Nacking mccouch update. [views:debug,2014-08-19T16:49:31.156,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/204. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.156,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",204,active,0} [ns_server:debug,2014-08-19T16:49:31.157,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,930,802,436,308,670,542,904,776,410, 282,644,516,942,878,814,448,384,320,256,1006,746,682,618,554,252,980,916,852, 788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758, 694,630,566,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838, 774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744, 680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888, 824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368,304,730, 666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874, 810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354,290, 716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232,960, 896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440,376, 312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520,218, 1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532,894, 400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582,216, 944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632,994, 866,500,372,734,606,240,968,840,474,346,708,580,214] [views:debug,2014-08-19T16:49:31.240,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/204. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.240,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",204,active,0} [ns_server:debug,2014-08-19T16:49:31.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 202. Nacking mccouch update. [views:debug,2014-08-19T16:49:31.352,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/202. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.353,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",202,active,0} [ns_server:debug,2014-08-19T16:49:31.353,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,942,878,814,448,384,320,256,1006,746,682,618,554,252,980,916, 852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018, 758,694,630,566,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902, 838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004, 744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224,952, 888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368,304, 730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938, 874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354, 290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,988, 924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326, 262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532, 894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582, 216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632, 994,866,500,372,734,606,240,968,840,474,346,708,580,214] [views:debug,2014-08-19T16:49:31.411,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/202. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.411,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",202,active,0} [ns_server:debug,2014-08-19T16:49:31.503,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 200. Nacking mccouch update. [views:debug,2014-08-19T16:49:31.503,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/200. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.504,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",200,active,0} [ns_server:debug,2014-08-19T16:49:31.504,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,942,878,814,448,384,320,256,1006,746,682,618,554,252,980,916, 852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018, 758,694,630,566,200,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318, 1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224, 952,888,824,458,394,330,266,1016,756,692,628,564,990,926,862,798,496,432,368, 304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210, 938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418, 354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584, 520,218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660, 532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710, 582,216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760, 632,994,866,500,372,734,606,240,968,840,474,346,708,580,214] [views:debug,2014-08-19T16:49:31.587,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/200. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",200,active,0} [ns_server:debug,2014-08-19T16:49:31.754,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 198. Nacking mccouch update. [views:debug,2014-08-19T16:49:31.754,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/198. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.754,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",198,active,0} [ns_server:debug,2014-08-19T16:49:31.755,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,942,878,814,448,384,320,256,1006,746,682,618,554,252,980,916, 852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018, 758,694,630,566,200,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318, 1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224, 952,888,824,458,394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432, 368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512, 210,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626, 562,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468, 404,340,276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612, 548,246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454, 390,326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662,598, 534,232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806, 504,440,376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648, 584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298, 660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348, 710,582,216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020, 760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214] [views:debug,2014-08-19T16:49:31.838,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/198. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:31.838,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",198,active,0} [ns_server:debug,2014-08-19T16:49:32.013,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 196. Nacking mccouch update. [views:debug,2014-08-19T16:49:32.013,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/196. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.013,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",196,active,0} [ns_server:debug,2014-08-19T16:49:32.014,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,942,878,814,448,384,320,256,1006,746,682,618,554,252,980,916, 852,788,486,422,358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018, 758,694,630,566,200,992,928,864,800,498,434,370,306,732,668,604,540,238,966, 902,838,774,472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318, 1004,744,680,616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224, 952,888,824,458,394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432, 368,304,730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512, 210,938,874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482, 418,354,290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626, 562,196,988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770, 468,404,340,276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676, 612,548,246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820, 454,390,326,262,1012,752,688,624,560,986,922,858,794,492,428,364,300,726,662, 598,534,232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870, 806,504,440,376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712, 648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426, 298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476, 348,710,582,216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270, 1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214] [views:debug,2014-08-19T16:49:32.098,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/196. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.098,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",196,active,0} [ns_server:debug,2014-08-19T16:49:32.248,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 194. Nacking mccouch update. [views:debug,2014-08-19T16:49:32.249,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/194. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.249,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",194,active,0} [ns_server:debug,2014-08-19T16:49:32.250,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,682,618,554,252,980,916,852,788,486,422, 358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566, 200,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472, 408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616, 552,250,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458, 394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666, 602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810, 508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,920,792,426,298,660,532, 894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710,582, 216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760,632, 994,866,500,372,734,606,240,968,840,474,346,708,580,214,942,814,448,320] [views:debug,2014-08-19T16:49:32.308,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/194. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.308,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",194,active,0} [ns_server:debug,2014-08-19T16:49:32.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 192. Nacking mccouch update. [views:debug,2014-08-19T16:49:32.424,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/192. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",192,active,0} [ns_server:debug,2014-08-19T16:49:32.425,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,682,618,554,252,980,916,852,788,486,422, 358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566, 200,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472, 408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616, 552,250,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458, 394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666, 602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810, 508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426,298,660, 532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710, 582,216,944,816,450,322,684,556,918,790,424,296,658,530,892,398,270,1020,760, 632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942,814,448,320] [views:debug,2014-08-19T16:49:32.484,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/192. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.484,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",192,active,0} [ns_server:debug,2014-08-19T16:49:32.575,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 190. Nacking mccouch update. [views:debug,2014-08-19T16:49:32.575,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/190. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.575,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",190,active,0} [ns_server:debug,2014-08-19T16:49:32.576,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,682,618,554,252,980,916,852,788,486,422, 358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566, 200,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472, 408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616, 552,250,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458, 394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666, 602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810, 508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426,298,660, 532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710, 582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398,270,1020, 760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942,814,448, 320] [views:debug,2014-08-19T16:49:32.634,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/190. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.634,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",190,active,0} [ns_server:debug,2014-08-19T16:49:32.809,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 188. Nacking mccouch update. [views:debug,2014-08-19T16:49:32.809,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/188. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.809,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",188,active,0} [ns_server:debug,2014-08-19T16:49:32.810,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,682,618,554,252,188,980,916,852,788,486, 422,358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630, 566,200,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774, 472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680, 616,552,250,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824, 458,394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730, 666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874, 810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354,290, 716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988, 924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326, 262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584, 520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426,298, 660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348, 710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398,270, 1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942,814, 448,320] [views:debug,2014-08-19T16:49:32.869,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/188. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:32.869,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",188,active,0} [user:info,2014-08-19T16:49:32.948,ns_1@10.242.238.88:<0.25442.0>:ns_rebalancer:rebalance:294]Started rebalancing bucket default [rebalance:info,2014-08-19T16:49:32.948,ns_1@10.242.238.88:<0.25442.0>:ns_rebalancer:rebalance:295]Rebalancing bucket "default" with config [{uuid, <<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88']}, {map, [['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined]]}, {map_opts_hash,133465355}] [ns_server:debug,2014-08-19T16:49:32.958,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:32.958,ns_1@10.242.238.88:<0.25622.0>:ns_rebalancer:rebalance:315]Waiting for bucket "default" to be ready on ['ns_1@10.242.238.88', 'ns_1@10.242.238.89', 'ns_1@10.242.238.90', 'ns_1@10.242.238.91'] [ns_server:debug,2014-08-19T16:49:32.958,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:32.958,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:32.958,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:warn,2014-08-19T16:49:32.959,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:355]Remote server node {'capi_ddoc_replication_srv-default','ns_1@10.242.238.91'} process down: noproc [ns_server:debug,2014-08-19T16:49:32.959,ns_1@10.242.238.88:<0.25630.0>:janitor_agent:new_style_query_vbucket_states_loop:120]Exception from query_vbucket_states of "default":'ns_1@10.242.238.91' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@10.242.238.91'}, query_vbucket_states,infinity]}}} [ns_server:warn,2014-08-19T16:49:32.959,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:355]Remote server node {'capi_ddoc_replication_srv-default','ns_1@10.242.238.89'} process down: noproc [ns_server:debug,2014-08-19T16:49:32.959,ns_1@10.242.238.88:<0.25630.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:125]Waiting for "default" on 'ns_1@10.242.238.91' [ns_server:warn,2014-08-19T16:49:32.959,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:355]Remote server node {'capi_ddoc_replication_srv-default','ns_1@10.242.238.90'} process down: noproc [ns_server:debug,2014-08-19T16:49:32.959,ns_1@10.242.238.88:<0.25628.0>:janitor_agent:new_style_query_vbucket_states_loop:120]Exception from query_vbucket_states of "default":'ns_1@10.242.238.90' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@10.242.238.90'}, query_vbucket_states,infinity]}}} [ns_server:debug,2014-08-19T16:49:32.959,ns_1@10.242.238.88:<0.25627.0>:janitor_agent:new_style_query_vbucket_states_loop:120]Exception from query_vbucket_states of "default":'ns_1@10.242.238.89' {'EXIT',{noproc,{gen_server,call, [{'janitor_agent-default','ns_1@10.242.238.89'}, query_vbucket_states,infinity]}}} [ns_server:debug,2014-08-19T16:49:32.959,ns_1@10.242.238.88:<0.25628.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:125]Waiting for "default" on 'ns_1@10.242.238.90' 
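The {'EXIT',{noproc,...}} lines above come from gen_server:call against the per-bucket 'janitor_agent-default' process on nodes where that process is not registered yet; the rebalancer logs "Waiting for ... on ..." and retries until the bucket is ready everywhere. A minimal sketch of that poll-and-retry pattern follows; the module name, function name and RetryMs argument are illustrative assumptions, not the actual ns_server code.

-module(wait_ready_sketch).
-export([wait_for_bucket/3]).

%% Sketch only: poll the per-bucket janitor agent on a remote node until
%% the registered process exists and answers query_vbucket_states.
wait_for_bucket(Bucket, Node, RetryMs) ->
    Server = {list_to_atom("janitor_agent-" ++ Bucket), Node},
    try gen_server:call(Server, query_vbucket_states, infinity) of
        States ->
            {ok, States}
    catch
        exit:{noproc, _} ->
            %% Same condition as the {'EXIT',{noproc,{gen_server,call,...}}}
            %% entries logged above: the remote process is not up yet,
            %% so sleep briefly and try again.
            timer:sleep(RetryMs),
            wait_for_bucket(Bucket, Node, RetryMs)
    end.

Once every node returns vbucket states, the rebalancer proceeds (see "Bucket is ready on all nodes" later in this log).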
[ns_server:debug,2014-08-19T16:49:32.961,ns_1@10.242.238.88:<0.25627.0>:janitor_agent:new_style_query_vbucket_states_loop_next_step:125]Waiting for "default" on 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:49:33.031,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:33.035,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:33.043,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 186. Nacking mccouch update. [views:debug,2014-08-19T16:49:33.043,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/186. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.044,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",186,active,0} [ns_server:debug,2014-08-19T16:49:33.044,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,682,618,554,252,188,980,916,852,788,486, 422,358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630, 566,200,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774, 472,408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680, 616,552,250,186,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888, 824,458,394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304, 730,666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938, 874,810,508,444,380,316,1002,742,678,614,550,248,976,912,848,784,482,418,354, 290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548, 246,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806, 504,440,376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648, 584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426, 298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476, 348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398, 270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942, 814,448,320] [ns_server:debug,2014-08-19T16:49:33.063,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:49:33.122,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/186. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.123,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",186,active,0} [ns_server:debug,2014-08-19T16:49:33.198,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 184. Nacking mccouch update. [views:debug,2014-08-19T16:49:33.198,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/184. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",184,active,0} [ns_server:debug,2014-08-19T16:49:33.199,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,618,252,980,916,852,788,486,422,358,294, 720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992, 928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472,408,344, 280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250, 186,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394, 330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602, 538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508, 444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440, 376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584,520, 218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426,298,660, 532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348,710, 582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398,270,1020, 760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942,814,448, 320,682,554,188] [views:debug,2014-08-19T16:49:33.257,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/184. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.257,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",184,active,0} [ns_server:debug,2014-08-19T16:49:33.407,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 182. Nacking mccouch update. [views:debug,2014-08-19T16:49:33.407,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/182. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.407,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",182,active,0} [ns_server:debug,2014-08-19T16:49:33.408,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,618,252,980,916,852,788,486,422,358,294, 720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992, 928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472,408,344, 280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250, 186,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394, 330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602, 538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508, 444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326, 262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,972,908,844,780,478,414,350,286,712,648,584, 520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426,298, 660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348, 710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398,270, 1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942,814, 448,320,682,554,188] [views:debug,2014-08-19T16:49:33.466,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/182. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.466,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",182,active,0} [ns_server:debug,2014-08-19T16:49:33.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 180. Nacking mccouch update. [views:debug,2014-08-19T16:49:33.566,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/180. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",180,active,0} [ns_server:debug,2014-08-19T16:49:33.567,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854,488, 360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904,776, 410,282,644,516,878,384,256,1006,746,618,252,980,916,852,788,486,422,358,294, 720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992, 928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472,408,344, 280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250, 186,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394, 330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602, 538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508, 444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326, 262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648, 584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426, 298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476, 348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398, 270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942, 814,448,320,682,554,188] [views:debug,2014-08-19T16:49:33.625,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/180. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.625,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",180,active,0} [ns_server:debug,2014-08-19T16:49:33.717,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 178. Nacking mccouch update. [views:debug,2014-08-19T16:49:33.717,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/178. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.717,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",178,active,0} [ns_server:debug,2014-08-19T16:49:33.718,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,904, 776,410,282,644,516,878,384,256,1006,746,618,252,980,916,852,788,486,422,358, 294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200, 992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472,408, 344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552, 250,186,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458, 394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666, 602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810, 508,444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290, 716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988, 924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712, 648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792, 426,298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842, 476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892, 398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214, 942,814,448,320,682,554,188] [views:debug,2014-08-19T16:49:33.768,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/178. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.768,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",178,active,0} [ns_server:debug,2014-08-19T16:49:33.902,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 176. Nacking mccouch update. [views:debug,2014-08-19T16:49:33.902,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/176. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.902,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",176,active,0} [ns_server:debug,2014-08-19T16:49:33.903,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,916,852,788,486,422, 358,294,720,656,592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566, 200,992,928,864,800,498,434,370,306,732,668,604,540,238,966,902,838,774,472, 408,344,280,706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616, 552,250,186,978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824, 458,394,330,266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730, 666,602,538,236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874, 810,508,444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354, 290,716,652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196, 988,924,860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404, 340,276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548, 246,182,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454, 390,326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662, 598,534,232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920, 792,426,298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970, 842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530, 892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580, 214,942,814,448,320,682,554,188] [rebalance:info,2014-08-19T16:49:33.963,ns_1@10.242.238.88:<0.25622.0>:ns_rebalancer:rebalance:319]Bucket is ready on all nodes [views:debug,2014-08-19T16:49:33.986,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/176. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:33.987,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",176,active,0} [ns_server:debug,2014-08-19T16:49:34.036,ns_1@10.242.238.88:<0.25442.0>:mb_map:generate_map_chain:403]Natural map score: {768,768,768} [ns_server:debug,2014-08-19T16:49:34.067,ns_1@10.242.238.88:<0.25442.0>:mb_map:generate_map_chain:410]Rnd maps scores: {768,768,768}, {768,768,768} [ns_server:debug,2014-08-19T16:49:34.068,ns_1@10.242.238.88:<0.25442.0>:mb_map:generate_map_chain:427]Considering 1 maps: [{768,768,768}] [ns_server:debug,2014-08-19T16:49:34.068,ns_1@10.242.238.88:<0.25442.0>:mb_map:generate_map_chain:439]Best map score: {768,768,768} (true,true,true) [ns_server:debug,2014-08-19T16:49:34.069,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:34.069,ns_1@10.242.238.88:<0.25442.0>:ns_rebalancer:rebalance:373]Target map options: [{replication_topology,star}, {tags,undefined}, {max_slaves,10}] (hash: 133465355) [ns_server:debug,2014-08-19T16:49:34.069,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:34.071,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([vbucket_map_history]..) [ns_server:debug,2014-08-19T16:49:34.074,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: vbucket_map_history -> [{[['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], 
['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}, {[['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88'|...], [...]|...], [{replication_topology,star},{tags,undefined},{max_slaves,10}]}] [rebalance:info,2014-08-19T16:49:34.070,ns_1@10.242.238.88:<0.25442.0>:ns_rebalancer:run_mover:378]Target map (distance: {768,768,768}): [['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], 
['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], 
['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], 
['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], 
['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], 
['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], 
['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], 
['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], 
['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], 
['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], 
['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], 
['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] [ns_server:debug,2014-08-19T16:49:34.081,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:34.081,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:34.084,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:34.091,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[]}, {fastForwardMap,[{0,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {1,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {2,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {3,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {4,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {5,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {6,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {7,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {8,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {9,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {10,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {11,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {12,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {13,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {14,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {15,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {16,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {17,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {18,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {19,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {20,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {21,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {22,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {23,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {24,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {25,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {26,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {27,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {28,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {29,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {30,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {31,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {32,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {33,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {34,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {35,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {36,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {37,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {38,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {39,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {40,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {41,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {42,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {43,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {44,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {45,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {46,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {47,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {48,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {49,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {50,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {51,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {52,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {53,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {54,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {55,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {56,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {57,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {58,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {59,[], 
['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {60,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {61,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {62,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {63,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {64,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {65,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {66,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {67,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {68,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {69,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {70,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {71,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {72,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {73,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {74,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {75,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {76,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {77,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {78,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {79,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {80,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {81,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {82,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {83,[], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {84,[],['ns_1@10.242.238.88'|...]}, {85,[],[...]}, {86,[],...}, {87,...}, {...}|...]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [user:info,2014-08-19T16:49:34.097,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:init:152]Bucket "default" rebalance does not seem to be swap rebalance [ns_server:debug,2014-08-19T16:49:34.110,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:init:176]The following count of vbuckets do not need to be moved at all: 0 [ns_server:debug,2014-08-19T16:49:34.110,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:init:176]The following moves are planned: [{1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1008, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {967, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}, {937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {926, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {885, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {873, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {872, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {871, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {870, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {869, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {868, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {867, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {866, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {865, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {864, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {863, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {862, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {861, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {860, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {859, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {858, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {857, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {856, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {855, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {854, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {853, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}, {852, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {851, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {850, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {849, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {848, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {847, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {846, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {845, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {844, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {843, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {842, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {841, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {840, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {839, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {838, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {837, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {836, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {835, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {834, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {833, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {832, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {831, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {830, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {829, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {828, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {827, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {826, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {825, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {824, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {823, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {822, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {821, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {820, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {819, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {818, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {817, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {816, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {815, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {814, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {813, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {812, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {811, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {810, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {809, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {808, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {807, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {806, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {805, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {804, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {803, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {802, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {801, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {800, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {799, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {798, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {797, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {796, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {795, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {794, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {793, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {792, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {791, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {790, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {789, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {788, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {787, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {786, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {785, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {784, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {783, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {782, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {781, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {780, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {779, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {778, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {777, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {776, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {775, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {774, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {773, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {772, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {771, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {770, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {769, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {768, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88']}, {767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {762, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {721, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, {681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {680, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {639, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {618, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {617, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {616, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {615, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {614, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {613, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {612, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {611, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {610, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {609, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {608, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {607, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {606, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {605, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {604, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {603, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {602, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {601, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {600, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {599, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {598, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {597, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}, {596, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {595, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {594, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {593, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {592, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {591, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {590, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {589, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {588, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {587, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {586, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {585, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {584, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {583, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {582, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {581, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {580, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {579, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {578, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {577, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {576, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {575, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {574, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {573, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {572, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {571, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {570, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {569, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {568, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {567, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {566, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {565, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {564, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {563, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {562, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {561, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {560, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {559, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {558, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {557, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {556, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {555, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {554, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {553, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {552, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {551, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {550, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {549, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {548, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {547, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {546, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {545, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {544, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {543, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {542, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {541, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {540, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {539, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {538, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {537, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {536, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {535, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {534, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {533, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {532, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {531, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {530, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {529, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {528, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {527, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {526, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {525, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {524, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {523, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {522, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {521, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {520, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {519, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {518, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {517, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {516, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {515, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {514, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {513, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {512, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88']}, {511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {475, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {434, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}, {426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {393, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {362, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {361, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {360, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {359, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {358, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {357, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {356, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {355, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {354, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {353, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {352, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {351, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {350, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {349, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {348, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {347, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {346, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {345, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {344, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {343, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {342, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}, {341, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {340, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {339, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {338, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {337, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {336, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {335, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {334, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {333, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {332, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {331, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {330, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {329, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {328, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {327, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {326, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {325, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {324, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {323, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {322, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {321, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {320, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {319, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {318, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {317, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {316, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {315, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {314, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {313, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {312, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {311, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {310, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {309, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {308, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {307, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {306, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {305, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {304, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {303, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {302, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {301, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {300, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {299, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {298, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {297, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {296, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {295, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {294, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {293, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {292, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {291, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {290, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {289, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {288, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {287, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {286, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {285, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {284, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {283, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {282, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {281, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {280, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {279, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {278, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {277, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {276, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {275, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {274, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {273, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {272, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {271, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {270, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {269, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {268, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {267, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {266, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {265, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {264, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {263, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {262, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {261, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {260, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {259, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {258, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {257, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {256, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88']}, {255, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {254, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {253, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {252, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {251, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {250, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {249, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {248, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {247, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {246, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {245, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {244, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {243, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {242, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {241, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {240, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {239, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {238, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {237, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {236, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {235, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {234, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {233, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {232, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {231, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {230, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {229, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {228, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {227, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {226, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {225, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {224, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {223, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {222, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {221, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {220, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {219, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {218, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {217, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {216, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {215, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {214, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {213, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {212, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {211, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {210, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {209, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {208, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {207, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {206, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {205, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {204, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {203, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {202, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {201, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {200, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {199, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {198, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {197, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {196, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {195, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {194, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {193, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {192, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {191, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {190, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {189, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {188, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {187, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {186, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {185, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {184, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {183, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {182, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {181, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {180, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {179, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {178, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {177, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {176, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {175, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {174, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {173, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {172, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {171, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91']}, {170, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {169, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {168, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {167, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {166, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {165, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {164, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {163, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {162, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {161, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {160, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {159, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {158, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {157, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {156, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {155, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {154, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {153, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {152, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {151, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {150, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {149, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {148, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {147, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {146, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {145, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {144, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {143, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {142, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {141, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {140, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {139, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {138, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {137, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {136, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {135, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {134, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {133, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {132, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {131, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {130, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {129, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {128, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {127, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {126, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {125, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {124, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {123, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {122, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {121, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {120, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {119, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {118, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {117, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {116, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {115, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {114, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {113, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {112, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {111, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {110, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {109, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {108, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {107, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {106, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {105, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {104, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {103, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {102, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {101, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {100, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {99, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {98, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {97, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {96, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {95, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {94, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {93, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {92, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {91, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {90, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {89, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {88, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {87, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {86, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90']}, {85, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {84, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {83, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {82, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {81, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {80, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {79, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {78, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {77, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {76, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {75, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {74, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {73, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {72, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {71, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {70, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {69, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {68, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {67, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {66, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {65, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {64, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {63, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {62, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {61, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {60, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {59, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {58, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {57, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {56, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {55, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {54, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {53, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {52, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {51, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {50, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {49, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {48, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {47, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {46, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {45, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {44, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {43, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {42, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {41, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {40, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {39, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {38, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {37, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {36, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {35, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {34, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {33, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {32, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {31, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {30, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {29, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {28, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {27, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {26, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {25, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {24, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {23, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {22, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {21, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {20, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {19, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {18, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {17, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {16, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {15, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {14, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {13, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {12, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {11, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {10, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {9, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {8, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {7, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {6, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {5, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {4, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {3, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {2, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {1, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}, {0, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89']}] [ns_server:debug,2014-08-19T16:49:34.139,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:34.142,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.25760.0>) [rebalance:info,2014-08-19T16:49:34.142,ns_1@10.242.238.88:<0.25760.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1023 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:34.142,ns_1@10.242.238.88:<0.25766.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1023 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:34.142,ns_1@10.242.238.88:<0.25767.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1023 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:34.161,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 174. Nacking mccouch update. [views:debug,2014-08-19T16:49:34.161,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/174. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:34.161,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",174,active,0} [ns_server:debug,2014-08-19T16:49:34.163,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,656, 592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864, 800,498,434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280, 706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186, 978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394,330, 266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538, 236,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444, 380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652, 588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860, 796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276,766, 702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440, 376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648,584, 520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426,298, 660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476,348, 710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398,270, 1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942,814, 448,320,682,554,188,916,788,422,294] [ns_server:debug,2014-08-19T16:49:34.170,ns_1@10.242.238.88:<0.25768.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1023 into 'ns_1@10.242.238.90' is <18125.18828.0> [ns_server:debug,2014-08-19T16:49:34.175,ns_1@10.242.238.88:<0.25768.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1023 into 'ns_1@10.242.238.91' is <18126.18782.0> [rebalance:debug,2014-08-19T16:49:34.176,ns_1@10.242.238.88:<0.25760.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1023 is <0.25768.0> [views:debug,2014-08-19T16:49:34.237,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/174. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:34.237,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",174,active,0} [ns_server:debug,2014-08-19T16:49:34.286,ns_1@10.242.238.88:<0.25769.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.18782.0>}, {'ns_1@10.242.238.90',<18125.18828.0>}]) [rebalance:info,2014-08-19T16:49:34.286,ns_1@10.242.238.88:<0.25760.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:34.287,ns_1@10.242.238.88:<0.25760.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1023 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:34.287,ns_1@10.242.238.88:<0.25760.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:34.288,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:34.293,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:34.293,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.25795.0>) [ns_server:debug,2014-08-19T16:49:34.294,ns_1@10.242.238.88:<0.25796.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [rebalance:info,2014-08-19T16:49:34.294,ns_1@10.242.238.88:<0.25795.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 767 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:34.294,ns_1@10.242.238.88:<0.25801.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 767 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:34.294,ns_1@10.242.238.88:<0.25802.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 767 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:34.297,ns_1@10.242.238.88:<0.25803.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 767 into 'ns_1@10.242.238.91' is <18126.18802.0> [ns_server:debug,2014-08-19T16:49:34.299,ns_1@10.242.238.88:<0.25803.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 767 into 'ns_1@10.242.238.90' is <18125.18847.0> [rebalance:debug,2014-08-19T16:49:34.299,ns_1@10.242.238.88:<0.25795.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 767 is <0.25803.0> [ns_server:debug,2014-08-19T16:49:34.273,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:initiate_bucket_rebalance:200]Initial estimates: [{{'ns_1@10.242.238.88',0},{0,0}}, {{'ns_1@10.242.238.88',0},{0,0}}, {{'ns_1@10.242.238.88',1},{0,0}}, {{'ns_1@10.242.238.88',1},{0,0}}, {{'ns_1@10.242.238.88',2},{0,0}}, {{'ns_1@10.242.238.88',2},{0,0}}, {{'ns_1@10.242.238.88',3},{0,0}}, {{'ns_1@10.242.238.88',3},{0,0}}, {{'ns_1@10.242.238.88',4},{0,0}}, {{'ns_1@10.242.238.88',4},{0,0}}, {{'ns_1@10.242.238.88',5},{0,0}}, {{'ns_1@10.242.238.88',5},{0,0}}, {{'ns_1@10.242.238.88',6},{0,0}}, {{'ns_1@10.242.238.88',6},{0,0}}, {{'ns_1@10.242.238.88',7},{0,0}}, {{'ns_1@10.242.238.88',7},{0,0}}, {{'ns_1@10.242.238.88',8},{0,0}}, {{'ns_1@10.242.238.88',8},{0,0}}, {{'ns_1@10.242.238.88',9},{0,0}}, {{'ns_1@10.242.238.88',9},{0,0}}, {{'ns_1@10.242.238.88',10},{0,0}}, {{'ns_1@10.242.238.88',10},{0,0}}, {{'ns_1@10.242.238.88',11},{0,0}}, {{'ns_1@10.242.238.88',11},{0,0}}, {{'ns_1@10.242.238.88',12},{0,0}}, {{'ns_1@10.242.238.88',12},{0,0}}, {{'ns_1@10.242.238.88',13},{0,0}}, {{'ns_1@10.242.238.88',13},{0,0}}, {{'ns_1@10.242.238.88',14},{0,0}}, {{'ns_1@10.242.238.88',14},{0,0}}, {{'ns_1@10.242.238.88',15},{0,0}}, {{'ns_1@10.242.238.88',15},{0,0}}, {{'ns_1@10.242.238.88',16},{0,0}}, {{'ns_1@10.242.238.88',16},{0,0}}, {{'ns_1@10.242.238.88',17},{0,0}}, {{'ns_1@10.242.238.88',17},{0,0}}, {{'ns_1@10.242.238.88',18},{0,0}}, {{'ns_1@10.242.238.88',18},{0,0}}, {{'ns_1@10.242.238.88',19},{0,0}}, {{'ns_1@10.242.238.88',19},{0,0}}, {{'ns_1@10.242.238.88',20},{0,0}}, {{'ns_1@10.242.238.88',20},{0,0}}, {{'ns_1@10.242.238.88',21},{0,0}}, {{'ns_1@10.242.238.88',21},{0,0}}, {{'ns_1@10.242.238.88',22},{0,0}}, {{'ns_1@10.242.238.88',22},{0,0}}, {{'ns_1@10.242.238.88',23},{0,0}}, {{'ns_1@10.242.238.88',23},{0,0}}, {{'ns_1@10.242.238.88',24},{0,0}}, {{'ns_1@10.242.238.88',24},{0,0}}, {{'ns_1@10.242.238.88',25},{0,0}}, {{'ns_1@10.242.238.88',25},{0,0}}, {{'ns_1@10.242.238.88',26},{0,0}}, {{'ns_1@10.242.238.88',26},{0,0}}, {{'ns_1@10.242.238.88',27},{0,0}}, {{'ns_1@10.242.238.88',27},{0,0}}, {{'ns_1@10.242.238.88',28},{0,0}}, {{'ns_1@10.242.238.88',28},{0,0}}, {{'ns_1@10.242.238.88',29},{0,0}}, {{'ns_1@10.242.238.88',29},{0,0}}, {{'ns_1@10.242.238.88',30},{0,0}}, 
{{'ns_1@10.242.238.88',30},{0,0}}, {{'ns_1@10.242.238.88',31},{0,0}}, {{'ns_1@10.242.238.88',31},{0,0}}, {{'ns_1@10.242.238.88',32},{0,0}}, {{'ns_1@10.242.238.88',32},{0,0}}, {{'ns_1@10.242.238.88',33},{0,0}}, {{'ns_1@10.242.238.88',33},{0,0}}, {{'ns_1@10.242.238.88',34},{0,0}}, {{'ns_1@10.242.238.88',34},{0,0}}, {{'ns_1@10.242.238.88',35},{0,0}}, {{'ns_1@10.242.238.88',35},{0,0}}, {{'ns_1@10.242.238.88',36},{0,0}}, {{'ns_1@10.242.238.88',36},{0,0}}, {{'ns_1@10.242.238.88',37},{0,0}}, {{'ns_1@10.242.238.88',37},{0,0}}, {{'ns_1@10.242.238.88',38},{0,0}}, {{'ns_1@10.242.238.88',38},{0,0}}, {{'ns_1@10.242.238.88',39},{0,0}}, {{'ns_1@10.242.238.88',39},{0,0}}, {{'ns_1@10.242.238.88',40},{0,0}}, {{'ns_1@10.242.238.88',40},{0,0}}, {{'ns_1@10.242.238.88',41},{0,0}}, {{'ns_1@10.242.238.88',41},{0,0}}, {{'ns_1@10.242.238.88',42},{0,0}}, {{'ns_1@10.242.238.88',42},{0,0}}, {{'ns_1@10.242.238.88',43},{0,0}}, {{'ns_1@10.242.238.88',43},{0,0}}, {{'ns_1@10.242.238.88',44},{0,0}}, {{'ns_1@10.242.238.88',44},{0,0}}, {{'ns_1@10.242.238.88',45},{0,0}}, {{'ns_1@10.242.238.88',45},{0,0}}, {{'ns_1@10.242.238.88',46},{0,0}}, {{'ns_1@10.242.238.88',46},{0,0}}, {{'ns_1@10.242.238.88',47},{0,0}}, {{'ns_1@10.242.238.88',47},{0,0}}, {{'ns_1@10.242.238.88',48},{0,0}}, {{'ns_1@10.242.238.88',48},{0,0}}, {{'ns_1@10.242.238.88',49},{0,0}}, {{'ns_1@10.242.238.88',49},{0,0}}, {{'ns_1@10.242.238.88',50},{0,0}}, {{'ns_1@10.242.238.88',50},{0,0}}, {{'ns_1@10.242.238.88',51},{0,0}}, {{'ns_1@10.242.238.88',51},{0,0}}, {{'ns_1@10.242.238.88',52},{0,0}}, {{'ns_1@10.242.238.88',52},{0,0}}, {{'ns_1@10.242.238.88',53},{0,0}}, {{'ns_1@10.242.238.88',53},{0,0}}, {{'ns_1@10.242.238.88',54},{0,0}}, {{'ns_1@10.242.238.88',54},{0,0}}, {{'ns_1@10.242.238.88',55},{0,0}}, {{'ns_1@10.242.238.88',55},{0,0}}, {{'ns_1@10.242.238.88',56},{0,0}}, {{'ns_1@10.242.238.88',56},{0,0}}, {{'ns_1@10.242.238.88',57},{0,0}}, {{'ns_1@10.242.238.88',57},{0,0}}, {{'ns_1@10.242.238.88',58},{0,0}}, {{'ns_1@10.242.238.88',58},{0,0}}, {{'ns_1@10.242.238.88',59},{0,0}}, {{'ns_1@10.242.238.88',59},{0,0}}, {{'ns_1@10.242.238.88',60},{0,0}}, {{'ns_1@10.242.238.88',60},{0,0}}, {{'ns_1@10.242.238.88',61},{0,0}}, {{'ns_1@10.242.238.88',61},{0,0}}, {{'ns_1@10.242.238.88',62},{0,0}}, {{'ns_1@10.242.238.88',62},{0,0}}, {{'ns_1@10.242.238.88',63},{0,0}}, {{'ns_1@10.242.238.88',63},{0,0}}, {{'ns_1@10.242.238.88',64},{0,0}}, {{'ns_1@10.242.238.88',64},{0,0}}, {{'ns_1@10.242.238.88',65},{0,0}}, {{'ns_1@10.242.238.88',65},{0,0}}, {{'ns_1@10.242.238.88',66},{0,0}}, {{'ns_1@10.242.238.88',66},{0,0}}, {{'ns_1@10.242.238.88',67},{0,0}}, {{'ns_1@10.242.238.88',67},{0,0}}, {{'ns_1@10.242.238.88',68},{0,0}}, {{'ns_1@10.242.238.88',68},{0,0}}, {{'ns_1@10.242.238.88',69},{0,0}}, {{'ns_1@10.242.238.88',69},{0,0}}, {{'ns_1@10.242.238.88',70},{0,0}}, {{'ns_1@10.242.238.88',70},{0,0}}, {{'ns_1@10.242.238.88',71},{0,0}}, {{'ns_1@10.242.238.88',71},{0,0}}, {{'ns_1@10.242.238.88',72},{0,0}}, {{'ns_1@10.242.238.88',72},{0,0}}, {{'ns_1@10.242.238.88',73},{0,0}}, {{'ns_1@10.242.238.88',73},{0,0}}, {{'ns_1@10.242.238.88',74},{0,0}}, {{'ns_1@10.242.238.88',74},{0,0}}, {{'ns_1@10.242.238.88',75},{0,0}}, {{'ns_1@10.242.238.88',75},{0,0}}, {{'ns_1@10.242.238.88',76},{0,0}}, {{'ns_1@10.242.238.88',76},{0,0}}, {{'ns_1@10.242.238.88',77},{0,0}}, {{'ns_1@10.242.238.88',77},{0,0}}, {{'ns_1@10.242.238.88',78},{0,0}}, {{'ns_1@10.242.238.88',78},{0,0}}, {{'ns_1@10.242.238.88',79},{0,0}}, {{'ns_1@10.242.238.88',79},{0,0}}, {{'ns_1@10.242.238.88',80},{0,0}}, {{'ns_1@10.242.238.88',80},{0,0}}, 
{{'ns_1@10.242.238.88',81},{0,0}}, {{'ns_1@10.242.238.88',81},{0,0}}, {{'ns_1@10.242.238.88',82},{0,0}}, {{'ns_1@10.242.238.88',82},{0,0}}, {{'ns_1@10.242.238.88',83},{0,0}}, {{'ns_1@10.242.238.88',83},{0,0}}, {{'ns_1@10.242.238.88',84},{0,0}}, {{'ns_1@10.242.238.88',84},{0,0}}, {{'ns_1@10.242.238.88',85},{0,0}}, {{'ns_1@10.242.238.88',85},{0,0}}, {{'ns_1@10.242.238.88',86},{0,0}}, {{'ns_1@10.242.238.88',86},{0,0}}, {{'ns_1@10.242.238.88',87},{0,0}}, {{'ns_1@10.242.238.88',87},{0,0}}, {{'ns_1@10.242.238.88',88},{0,0}}, {{'ns_1@10.242.238.88',88},{0,0}}, {{'ns_1@10.242.238.88',89},{0,0}}, {{'ns_1@10.242.238.88',89},{0,0}}, {{'ns_1@10.242.238.88',90},{0,0}}, {{'ns_1@10.242.238.88',90},{0,0}}, {{'ns_1@10.242.238.88',91},{0,0}}, {{'ns_1@10.242.238.88',91},{0,0}}, {{'ns_1@10.242.238.88',92},{0,0}}, {{'ns_1@10.242.238.88',92},{0,0}}, {{'ns_1@10.242.238.88',93},{0,0}}, {{'ns_1@10.242.238.88',93},{0,0}}, {{'ns_1@10.242.238.88',94},{0,0}}, {{'ns_1@10.242.238.88',94},{0,0}}, {{'ns_1@10.242.238.88',95},{0,0}}, {{'ns_1@10.242.238.88',95},{0,0}}, {{'ns_1@10.242.238.88',96},{0,0}}, {{'ns_1@10.242.238.88',96},{0,0}}, {{'ns_1@10.242.238.88',97},{0,0}}, {{'ns_1@10.242.238.88',97},{0,0}}, {{'ns_1@10.242.238.88',98},{0,0}}, {{'ns_1@10.242.238.88',98},{0,0}}, {{'ns_1@10.242.238.88',99},{0,0}}, {{'ns_1@10.242.238.88',99},{0,0}}, {{'ns_1@10.242.238.88',100},{0,0}}, {{'ns_1@10.242.238.88',100},{0,0}}, {{'ns_1@10.242.238.88',101},{0,0}}, {{'ns_1@10.242.238.88',101},{0,0}}, {{'ns_1@10.242.238.88',102},{0,0}}, {{'ns_1@10.242.238.88',102},{0,0}}, {{'ns_1@10.242.238.88',103},{0,0}}, {{'ns_1@10.242.238.88',103},{0,0}}, {{'ns_1@10.242.238.88',104},{0,0}}, {{'ns_1@10.242.238.88',104},{0,0}}, {{'ns_1@10.242.238.88',105},{0,0}}, {{'ns_1@10.242.238.88',105},{0,0}}, {{'ns_1@10.242.238.88',106},{0,0}}, {{'ns_1@10.242.238.88',106},{0,0}}, {{'ns_1@10.242.238.88',107},{0,0}}, {{'ns_1@10.242.238.88',107},{0,0}}, {{'ns_1@10.242.238.88',108},{0,0}}, {{'ns_1@10.242.238.88',108},{0,0}}, {{'ns_1@10.242.238.88',109},{0,0}}, {{'ns_1@10.242.238.88',109},{0,0}}, {{'ns_1@10.242.238.88',110},{0,0}}, {{'ns_1@10.242.238.88',110},{0,0}}, {{'ns_1@10.242.238.88',111},{0,0}}, {{'ns_1@10.242.238.88',111},{0,0}}, {{'ns_1@10.242.238.88',112},{0,0}}, {{'ns_1@10.242.238.88',112},{0,0}}, {{'ns_1@10.242.238.88',113},{0,0}}, {{'ns_1@10.242.238.88',113},{0,0}}, {{'ns_1@10.242.238.88',114},{0,0}}, {{'ns_1@10.242.238.88',114},{0,0}}, {{'ns_1@10.242.238.88',115},{0,0}}, {{'ns_1@10.242.238.88',115},{0,0}}, {{'ns_1@10.242.238.88',116},{0,0}}, {{'ns_1@10.242.238.88',116},{0,0}}, {{'ns_1@10.242.238.88',117},{0,0}}, {{'ns_1@10.242.238.88',117},{0,0}}, {{'ns_1@10.242.238.88',118},{0,0}}, {{'ns_1@10.242.238.88',118},{0,0}}, {{'ns_1@10.242.238.88',119},{0,0}}, {{'ns_1@10.242.238.88',119},{0,0}}, {{'ns_1@10.242.238.88',120},{0,0}}, {{'ns_1@10.242.238.88',120},{0,0}}, {{'ns_1@10.242.238.88',121},{0,0}}, {{'ns_1@10.242.238.88',121},{0,0}}, {{'ns_1@10.242.238.88',122},{0,0}}, {{'ns_1@10.242.238.88',122},{0,0}}, {{'ns_1@10.242.238.88',123},{0,0}}, {{'ns_1@10.242.238.88',123},{0,0}}, {{'ns_1@10.242.238.88',124},{0,0}}, {{'ns_1@10.242.238.88',124},{0,0}}, {{'ns_1@10.242.238.88',125},{0,0}}, {{'ns_1@10.242.238.88',125},{0,0}}, {{'ns_1@10.242.238.88',126},{0,0}}, {{'ns_1@10.242.238.88',126},{0,0}}, {{'ns_1@10.242.238.88',127},{0,0}}, {{'ns_1@10.242.238.88',127},{0,0}}, {{'ns_1@10.242.238.88',128},{0,0}}, {{'ns_1@10.242.238.88',128},{0,0}}, {{'ns_1@10.242.238.88',129},{0,0}}, {{'ns_1@10.242.238.88',129},{0,0}}, {{'ns_1@10.242.238.88',130},{0,0}}, 
{{'ns_1@10.242.238.88',130},{0,0}}, {{'ns_1@10.242.238.88',131},{0,0}}, {{'ns_1@10.242.238.88',131},{0,0}}, {{'ns_1@10.242.238.88',132},{0,0}}, {{'ns_1@10.242.238.88',132},{0,0}}, {{'ns_1@10.242.238.88',133},{0,0}}, {{'ns_1@10.242.238.88',133},{0,0}}, {{'ns_1@10.242.238.88',134},{0,0}}, {{'ns_1@10.242.238.88',134},{0,0}}, {{'ns_1@10.242.238.88',135},{0,0}}, {{'ns_1@10.242.238.88',135},{0,0}}, {{'ns_1@10.242.238.88',136},{0,0}}, {{'ns_1@10.242.238.88',136},{0,0}}, {{'ns_1@10.242.238.88',137},{0,0}}, {{'ns_1@10.242.238.88',137},{0,0}}, {{'ns_1@10.242.238.88',138},{0,0}}, {{'ns_1@10.242.238.88',138},{0,0}}, {{'ns_1@10.242.238.88',139},{0,0}}, {{'ns_1@10.242.238.88',139},{0,0}}, {{'ns_1@10.242.238.88',140},{0,0}}, {{'ns_1@10.242.238.88',140},{0,0}}, {{'ns_1@10.242.238.88',141},{0,0}}, {{'ns_1@10.242.238.88',141},{0,0}}, {{'ns_1@10.242.238.88',142},{0,0}}, {{'ns_1@10.242.238.88',142},{0,0}}, {{'ns_1@10.242.238.88',143},{0,0}}, {{'ns_1@10.242.238.88',143},{0,0}}, {{'ns_1@10.242.238.88',144},{0,0}}, {{'ns_1@10.242.238.88',144},{0,0}}, {{'ns_1@10.242.238.88',145},{0,0}}, {{'ns_1@10.242.238.88',145},{0,0}}, {{'ns_1@10.242.238.88',146},{0,0}}, {{'ns_1@10.242.238.88',146},{0,0}}, {{'ns_1@10.242.238.88',147},{0,0}}, {{'ns_1@10.242.238.88',147},{0,0}}, {{'ns_1@10.242.238.88',148},{0,0}}, {{'ns_1@10.242.238.88',148},{0,0}}, {{'ns_1@10.242.238.88',149},{0,0}}, {{'ns_1@10.242.238.88',149},{0,0}}, {{'ns_1@10.242.238.88',150},{0,0}}, {{'ns_1@10.242.238.88',150},{0,0}}, {{'ns_1@10.242.238.88',151},{0,0}}, {{'ns_1@10.242.238.88',151},{0,0}}, {{'ns_1@10.242.238.88',152},{0,0}}, {{'ns_1@10.242.238.88',152},{0,0}}, {{'ns_1@10.242.238.88',153},{0,0}}, {{'ns_1@10.242.238.88',153},{0,0}}, {{'ns_1@10.242.238.88',154},{0,0}}, {{'ns_1@10.242.238.88',154},{0,0}}, {{'ns_1@10.242.238.88',155},{0,0}}, {{'ns_1@10.242.238.88',155},{0,0}}, {{'ns_1@10.242.238.88',156},{0,0}}, {{'ns_1@10.242.238.88',156},{0,0}}, {{'ns_1@10.242.238.88',157},{0,0}}, {{'ns_1@10.242.238.88',157},{0,0}}, {{'ns_1@10.242.238.88',158},{0,0}}, {{'ns_1@10.242.238.88',158},{0,0}}, {{'ns_1@10.242.238.88',159},{0,0}}, {{'ns_1@10.242.238.88',159},{0,0}}, {{'ns_1@10.242.238.88',160},{0,0}}, {{'ns_1@10.242.238.88',160},{0,0}}, {{'ns_1@10.242.238.88',161},{0,0}}, {{'ns_1@10.242.238.88',161},{0,0}}, {{'ns_1@10.242.238.88',162},{0,0}}, {{'ns_1@10.242.238.88',162},{0,0}}, {{'ns_1@10.242.238.88',163},{0,0}}, {{'ns_1@10.242.238.88',163},{0,0}}, {{'ns_1@10.242.238.88',164},{0,0}}, {{'ns_1@10.242.238.88',164},{0,0}}, {{'ns_1@10.242.238.88',165},{0,0}}, {{'ns_1@10.242.238.88',165},{0,0}}, {{'ns_1@10.242.238.88',166},{0,0}}, {{'ns_1@10.242.238.88',166},{0,0}}, {{'ns_1@10.242.238.88',167},{0,0}}, {{'ns_1@10.242.238.88',167},{0,0}}, {{'ns_1@10.242.238.88',168},{0,0}}, {{'ns_1@10.242.238.88',168},{0,0}}, {{'ns_1@10.242.238.88',169},{0,0}}, {{'ns_1@10.242.238.88',169},{0,0}}, {{'ns_1@10.242.238.88',170},{0,0}}, {{'ns_1@10.242.238.88',170},{0,0}}, {{'ns_1@10.242.238.88',171},{0,0}}, {{'ns_1@10.242.238.88',171},{0,0}}, {{'ns_1@10.242.238.88',172},{0,0}}, {{'ns_1@10.242.238.88',172},{0,0}}, {{'ns_1@10.242.238.88',173},{0,0}}, {{'ns_1@10.242.238.88',173},{0,0}}, {{'ns_1@10.242.238.88',174},{0,0}}, {{'ns_1@10.242.238.88',174},{0,0}}, {{'ns_1@10.242.238.88',175},{0,0}}, {{'ns_1@10.242.238.88',175},{0,0}}, {{'ns_1@10.242.238.88',176},{0,0}}, {{'ns_1@10.242.238.88',176},{0,0}}, {{'ns_1@10.242.238.88',177},{0,0}}, {{'ns_1@10.242.238.88',177},{0,0}}, {{'ns_1@10.242.238.88',178},{0,0}}, {{'ns_1@10.242.238.88',178},{0,0}}, {{'ns_1@10.242.238.88',179},{0,0}}, 
{{'ns_1@10.242.238.88',179},{0,0}}, {{'ns_1@10.242.238.88',180},{0,0}}, {{'ns_1@10.242.238.88',180},{0,0}}, {{'ns_1@10.242.238.88',181},{0,0}}, {{'ns_1@10.242.238.88',181},{0,0}}, {{'ns_1@10.242.238.88',182},{0,0}}, {{'ns_1@10.242.238.88',182},{0,0}}, {{'ns_1@10.242.238.88',183},{0,0}}, {{'ns_1@10.242.238.88',183},{0,0}}, {{'ns_1@10.242.238.88',184},{0,0}}, {{'ns_1@10.242.238.88',184},{0,0}}, {{'ns_1@10.242.238.88',185},{0,0}}, {{'ns_1@10.242.238.88',185},{0,0}}, {{'ns_1@10.242.238.88',186},{0,0}}, {{'ns_1@10.242.238.88',186},{0,0}}, {{'ns_1@10.242.238.88',187},{0,0}}, {{'ns_1@10.242.238.88',187},{0,0}}, {{'ns_1@10.242.238.88',188},{0,0}}, {{'ns_1@10.242.238.88',188},{0,0}}, {{'ns_1@10.242.238.88',189},{0,0}}, {{'ns_1@10.242.238.88',189},{0,0}}, {{'ns_1@10.242.238.88',190},{0,0}}, {{'ns_1@10.242.238.88',190},{0,0}}, {{'ns_1@10.242.238.88',191},{0,0}}, {{'ns_1@10.242.238.88',191},{0,0}}, {{'ns_1@10.242.238.88',192},{0,0}}, {{'ns_1@10.242.238.88',192},{0,0}}, {{'ns_1@10.242.238.88',193},{0,0}}, {{'ns_1@10.242.238.88',193},{0,0}}, {{'ns_1@10.242.238.88',194},{0,0}}, {{'ns_1@10.242.238.88',194},{0,0}}, {{'ns_1@10.242.238.88',195},{0,0}}, {{'ns_1@10.242.238.88',195},{0,0}}, {{'ns_1@10.242.238.88',196},{0,0}}, {{'ns_1@10.242.238.88',196},{0,0}}, {{'ns_1@10.242.238.88',197},{0,0}}, {{'ns_1@10.242.238.88',197},{0,0}}, {{'ns_1@10.242.238.88',198},{0,0}}, {{'ns_1@10.242.238.88',198},{0,0}}, {{'ns_1@10.242.238.88',199},{0,0}}, {{'ns_1@10.242.238.88',199},{0,0}}, {{'ns_1@10.242.238.88',200},{0,0}}, {{'ns_1@10.242.238.88',200},{0,0}}, {{'ns_1@10.242.238.88',201},{0,0}}, {{'ns_1@10.242.238.88',201},{0,0}}, {{'ns_1@10.242.238.88',202},{0,0}}, {{'ns_1@10.242.238.88',202},{0,0}}, {{'ns_1@10.242.238.88',203},{0,0}}, {{'ns_1@10.242.238.88',203},{0,0}}, {{'ns_1@10.242.238.88',204},{0,0}}, {{'ns_1@10.242.238.88',204},{0,0}}, {{'ns_1@10.242.238.88',205},{0,0}}, {{'ns_1@10.242.238.88',205},{0,0}}, {{'ns_1@10.242.238.88',206},{0,0}}, {{'ns_1@10.242.238.88',206},{0,0}}, {{'ns_1@10.242.238.88',207},{0,0}}, {{'ns_1@10.242.238.88',207},{0,0}}, {{'ns_1@10.242.238.88',208},{0,0}}, {{'ns_1@10.242.238.88',208},{0,0}}, {{'ns_1@10.242.238.88',209},{0,0}}, {{'ns_1@10.242.238.88',209},{0,0}}, {{'ns_1@10.242.238.88',210},{0,0}}, {{'ns_1@10.242.238.88',210},{0,0}}, {{'ns_1@10.242.238.88',211},{0,0}}, {{'ns_1@10.242.238.88',211},{0,0}}, {{'ns_1@10.242.238.88',212},{0,0}}, {{'ns_1@10.242.238.88',212},{0,0}}, {{'ns_1@10.242.238.88',213},{0,0}}, {{'ns_1@10.242.238.88',213},{0,0}}, {{'ns_1@10.242.238.88',214},{0,0}}, {{'ns_1@10.242.238.88',214},{0,0}}, {{'ns_1@10.242.238.88',215},{0,0}}, {{'ns_1@10.242.238.88',215},{0,0}}, {{'ns_1@10.242.238.88',216},{0,0}}, {{'ns_1@10.242.238.88',216},{0,0}}, {{'ns_1@10.242.238.88',217},{0,0}}, {{'ns_1@10.242.238.88',217},{0,0}}, {{'ns_1@10.242.238.88',218},{0,0}}, {{'ns_1@10.242.238.88',218},{0,0}}, {{'ns_1@10.242.238.88',219},{0,0}}, {{'ns_1@10.242.238.88',219},{0,0}}, {{'ns_1@10.242.238.88',220},{0,0}}, {{'ns_1@10.242.238.88',220},{0,0}}, {{'ns_1@10.242.238.88',221},{0,0}}, {{'ns_1@10.242.238.88',221},{0,0}}, {{'ns_1@10.242.238.88',222},{0,0}}, {{'ns_1@10.242.238.88',222},{0,0}}, {{'ns_1@10.242.238.88',223},{0,0}}, {{'ns_1@10.242.238.88',223},{0,0}}, {{'ns_1@10.242.238.88',224},{0,0}}, {{'ns_1@10.242.238.88',224},{0,0}}, {{'ns_1@10.242.238.88',225},{0,0}}, {{'ns_1@10.242.238.88',225},{0,0}}, {{'ns_1@10.242.238.88',226},{0,0}}, {{'ns_1@10.242.238.88',226},{0,0}}, {{'ns_1@10.242.238.88',227},{0,0}}, {{'ns_1@10.242.238.88',227},{0,0}}, {{'ns_1@10.242.238.88',228},{0,0}}, 
{{'ns_1@10.242.238.88',228},{0,0}}, {{'ns_1@10.242.238.88',229},{0,0}}, {{'ns_1@10.242.238.88',229},{0,0}}, {{'ns_1@10.242.238.88',230},{0,0}}, {{'ns_1@10.242.238.88',230},{0,0}}, {{'ns_1@10.242.238.88',231},{0,0}}, {{'ns_1@10.242.238.88',231},{0,0}}, {{'ns_1@10.242.238.88',232},{0,0}}, {{'ns_1@10.242.238.88',232},{0,0}}, {{'ns_1@10.242.238.88',233},{0,0}}, {{'ns_1@10.242.238.88',233},{0,0}}, {{'ns_1@10.242.238.88',234},{0,0}}, {{'ns_1@10.242.238.88',234},{0,0}}, {{'ns_1@10.242.238.88',235},{0,0}}, {{'ns_1@10.242.238.88',235},{0,0}}, {{'ns_1@10.242.238.88',236},{0,0}}, {{'ns_1@10.242.238.88',236},{0,0}}, {{'ns_1@10.242.238.88',237},{0,0}}, {{'ns_1@10.242.238.88',237},{0,0}}, {{'ns_1@10.242.238.88',238},{0,0}}, {{'ns_1@10.242.238.88',238},{0,0}}, {{'ns_1@10.242.238.88',239},{0,0}}, {{'ns_1@10.242.238.88',239},{0,0}}, {{'ns_1@10.242.238.88',240},{0,0}}, {{'ns_1@10.242.238.88',240},{0,0}}, {{'ns_1@10.242.238.88',241},{0,0}}, {{'ns_1@10.242.238.88',241},{0,0}}, {{'ns_1@10.242.238.88',242},{0,0}}, {{'ns_1@10.242.238.88',242},{0,0}}, {{'ns_1@10.242.238.88',243},{0,0}}, {{'ns_1@10.242.238.88',243},{0,0}}, {{'ns_1@10.242.238.88',244},{0,0}}, {{'ns_1@10.242.238.88',244},{0,0}}, {{'ns_1@10.242.238.88',245},{0,0}}, {{'ns_1@10.242.238.88',245},{0,0}}, {{'ns_1@10.242.238.88',246},{0,0}}, {{'ns_1@10.242.238.88',246},{0,0}}, {{'ns_1@10.242.238.88',247},{0,0}}, {{'ns_1@10.242.238.88',247},{0,0}}, {{'ns_1@10.242.238.88',248},{0,0}}, {{'ns_1@10.242.238.88',248},{0,0}}, {{'ns_1@10.242.238.88',249},{0,0}}, {{'ns_1@10.242.238.88',249},{0,0}}, {{'ns_1@10.242.238.88',250},{0,0}}, {{'ns_1@10.242.238.88',250},{0,0}}, {{'ns_1@10.242.238.88',251},{0,0}}, {{'ns_1@10.242.238.88',251},{0,0}}, {{'ns_1@10.242.238.88',252},{0,0}}, {{'ns_1@10.242.238.88',252},{0,0}}, {{'ns_1@10.242.238.88',253},{0,0}}, {{'ns_1@10.242.238.88',253},{0,0}}, {{'ns_1@10.242.238.88',254},{0,0}}, {{'ns_1@10.242.238.88',254},{0,0}}, {{'ns_1@10.242.238.88',255},{0,0}}, {{'ns_1@10.242.238.88',255},{0,0}}, {{'ns_1@10.242.238.88',256},{0,0}}, {{'ns_1@10.242.238.88',256},{0,0}}, {{'ns_1@10.242.238.88',257},{0,0}}, {{'ns_1@10.242.238.88',257},{0,0}}, {{'ns_1@10.242.238.88',258},{0,0}}, {{'ns_1@10.242.238.88',258},{0,0}}, {{'ns_1@10.242.238.88',259},{0,0}}, {{'ns_1@10.242.238.88',259},{0,0}}, {{'ns_1@10.242.238.88',260},{0,0}}, {{'ns_1@10.242.238.88',260},{0,0}}, {{'ns_1@10.242.238.88',261},{0,0}}, {{'ns_1@10.242.238.88',261},{0,0}}, {{'ns_1@10.242.238.88',262},{0,0}}, {{'ns_1@10.242.238.88',262},{0,0}}, {{'ns_1@10.242.238.88',263},{0,0}}, {{'ns_1@10.242.238.88',263},{0,0}}, {{'ns_1@10.242.238.88',264},{0,0}}, {{'ns_1@10.242.238.88',264},{0,0}}, {{'ns_1@10.242.238.88',265},{0,0}}, {{'ns_1@10.242.238.88',265},{0,0}}, {{'ns_1@10.242.238.88',266},{0,0}}, {{'ns_1@10.242.238.88',266},{0,0}}, {{'ns_1@10.242.238.88',267},{0,0}}, {{'ns_1@10.242.238.88',267},{0,0}}, {{'ns_1@10.242.238.88',268},{0,0}}, {{'ns_1@10.242.238.88',268},{0,0}}, {{'ns_1@10.242.238.88',269},{0,0}}, {{'ns_1@10.242.238.88',269},{0,0}}, {{'ns_1@10.242.238.88',270},{0,0}}, {{'ns_1@10.242.238.88',270},{0,0}}, {{'ns_1@10.242.238.88',271},{0,0}}, {{'ns_1@10.242.238.88',271},{0,0}}, {{'ns_1@10.242.238.88',272},{0,0}}, {{'ns_1@10.242.238.88',272},{0,0}}, {{'ns_1@10.242.238.88',273},{0,0}}, {{'ns_1@10.242.238.88',273},{0,0}}, {{'ns_1@10.242.238.88',274},{0,0}}, {{'ns_1@10.242.238.88',274},{0,0}}, {{'ns_1@10.242.238.88',275},{0,0}}, {{'ns_1@10.242.238.88',275},{0,0}}, {{'ns_1@10.242.238.88',276},{0,0}}, {{'ns_1@10.242.238.88',276},{0,0}}, {{'ns_1@10.242.238.88',277},{0,0}}, 
{{'ns_1@10.242.238.88',277},{0,0}}, {{'ns_1@10.242.238.88',278},{0,0}}, {{'ns_1@10.242.238.88',278},{0,0}}, {{'ns_1@10.242.238.88',279},{0,0}}, {{'ns_1@10.242.238.88',279},{0,0}}, {{'ns_1@10.242.238.88',280},{0,0}}, {{'ns_1@10.242.238.88',280},{0,0}}, {{'ns_1@10.242.238.88',281},{0,0}}, {{'ns_1@10.242.238.88',281},{0,0}}, {{'ns_1@10.242.238.88',282},{0,0}}, {{'ns_1@10.242.238.88',282},{0,0}}, {{'ns_1@10.242.238.88',283},{0,0}}, {{'ns_1@10.242.238.88',283},{0,0}}, {{'ns_1@10.242.238.88',284},{0,0}}, {{'ns_1@10.242.238.88',284},{0,0}}, {{'ns_1@10.242.238.88',285},{0,0}}, {{'ns_1@10.242.238.88',285},{0,0}}, {{'ns_1@10.242.238.88',286},{0,0}}, {{'ns_1@10.242.238.88',286},{0,0}}, {{'ns_1@10.242.238.88',287},{0,0}}, {{'ns_1@10.242.238.88',287},{0,0}}, {{'ns_1@10.242.238.88',288},{0,0}}, {{'ns_1@10.242.238.88',288},{0,0}}, {{'ns_1@10.242.238.88',289},{0,0}}, {{'ns_1@10.242.238.88',289},{0,0}}, {{'ns_1@10.242.238.88',290},{0,0}}, {{'ns_1@10.242.238.88',290},{0,0}}, {{'ns_1@10.242.238.88',291},{0,0}}, {{'ns_1@10.242.238.88',291},{0,0}}, {{'ns_1@10.242.238.88',292},{0,0}}, {{'ns_1@10.242.238.88',292},{0,0}}, {{'ns_1@10.242.238.88',293},{0,0}}, {{'ns_1@10.242.238.88',293},{0,0}}, {{'ns_1@10.242.238.88',294},{0,0}}, {{'ns_1@10.242.238.88',294},{0,0}}, {{'ns_1@10.242.238.88',295},{0,0}}, {{'ns_1@10.242.238.88',295},{0,0}}, {{'ns_1@10.242.238.88',296},{0,0}}, {{'ns_1@10.242.238.88',296},{0,0}}, {{'ns_1@10.242.238.88',297},{0,0}}, {{'ns_1@10.242.238.88',297},{0,0}}, {{'ns_1@10.242.238.88',298},{0,0}}, {{'ns_1@10.242.238.88',298},{0,0}}, {{'ns_1@10.242.238.88',299},{0,0}}, {{'ns_1@10.242.238.88',299},{0,0}}, {{'ns_1@10.242.238.88',300},{0,0}}, {{'ns_1@10.242.238.88',300},{0,0}}, {{'ns_1@10.242.238.88',301},{0,0}}, {{'ns_1@10.242.238.88',301},{0,0}}, {{'ns_1@10.242.238.88',302},{0,0}}, {{'ns_1@10.242.238.88',302},{0,0}}, {{'ns_1@10.242.238.88',303},{0,0}}, {{'ns_1@10.242.238.88',303},{0,0}}, {{'ns_1@10.242.238.88',304},{0,0}}, {{'ns_1@10.242.238.88',304},{0,0}}, {{'ns_1@10.242.238.88',305},{0,0}}, {{'ns_1@10.242.238.88',305},{0,0}}, {{'ns_1@10.242.238.88',306},{0,0}}, {{'ns_1@10.242.238.88',306},{0,0}}, {{'ns_1@10.242.238.88',307},{0,0}}, {{'ns_1@10.242.238.88',307},{0,0}}, {{'ns_1@10.242.238.88',308},{0,0}}, {{'ns_1@10.242.238.88',308},{0,0}}, {{'ns_1@10.242.238.88',309},{0,0}}, {{'ns_1@10.242.238.88',309},{0,0}}, {{'ns_1@10.242.238.88',310},{0,0}}, {{'ns_1@10.242.238.88',310},{0,0}}, {{'ns_1@10.242.238.88',311},{0,0}}, {{'ns_1@10.242.238.88',311},{0,0}}, {{'ns_1@10.242.238.88',312},{0,0}}, {{'ns_1@10.242.238.88',312},{0,0}}, {{'ns_1@10.242.238.88',313},{0,0}}, {{'ns_1@10.242.238.88',313},{0,0}}, {{'ns_1@10.242.238.88',314},{0,0}}, {{'ns_1@10.242.238.88',314},{0,0}}, {{'ns_1@10.242.238.88',315},{0,0}}, {{'ns_1@10.242.238.88',315},{0,0}}, {{'ns_1@10.242.238.88',316},{0,0}}, {{'ns_1@10.242.238.88',316},{0,0}}, {{'ns_1@10.242.238.88',317},{0,0}}, {{'ns_1@10.242.238.88',317},{0,0}}, {{'ns_1@10.242.238.88',318},{0,0}}, {{'ns_1@10.242.238.88',318},{0,0}}, {{'ns_1@10.242.238.88',319},{0,0}}, {{'ns_1@10.242.238.88',319},{0,0}}, {{'ns_1@10.242.238.88',320},{0,0}}, {{'ns_1@10.242.238.88',320},{0,0}}, {{'ns_1@10.242.238.88',321},{0,0}}, {{'ns_1@10.242.238.88',321},{0,0}}, {{'ns_1@10.242.238.88',322},{0,0}}, {{'ns_1@10.242.238.88',322},{0,0}}, {{'ns_1@10.242.238.88',323},{0,0}}, {{'ns_1@10.242.238.88',323},{0,0}}, {{'ns_1@10.242.238.88',324},{0,0}}, {{'ns_1@10.242.238.88',324},{0,0}}, {{'ns_1@10.242.238.88',325},{0,0}}, {{'ns_1@10.242.238.88',325},{0,0}}, {{'ns_1@10.242.238.88',326},{0,0}}, 
{{'ns_1@10.242.238.88',326},{0,0}}, {{'ns_1@10.242.238.88',327},{0,0}}, {{'ns_1@10.242.238.88',327},{0,0}}, {{'ns_1@10.242.238.88',328},{0,0}}, {{'ns_1@10.242.238.88',328},{0,0}}, {{'ns_1@10.242.238.88',329},{0,0}}, {{'ns_1@10.242.238.88',329},{0,0}}, {{'ns_1@10.242.238.88',330},{0,0}}, {{'ns_1@10.242.238.88',330},{0,0}}, {{'ns_1@10.242.238.88',331},{0,0}}, {{'ns_1@10.242.238.88',331},{0,0}}, {{'ns_1@10.242.238.88',332},{0,0}}, {{'ns_1@10.242.238.88',332},{0,0}}, {{'ns_1@10.242.238.88',333},{0,0}}, {{'ns_1@10.242.238.88',333},{0,0}}, {{'ns_1@10.242.238.88',334},{0,0}}, {{'ns_1@10.242.238.88',334},{0,0}}, {{'ns_1@10.242.238.88',335},{0,0}}, {{'ns_1@10.242.238.88',335},{0,0}}, {{'ns_1@10.242.238.88',336},{0,0}}, {{'ns_1@10.242.238.88',336},{0,0}}, {{'ns_1@10.242.238.88',337},{0,0}}, {{'ns_1@10.242.238.88',337},{0,0}}, {{'ns_1@10.242.238.88',338},{0,0}}, {{'ns_1@10.242.238.88',338},{0,0}}, {{'ns_1@10.242.238.88',339},{0,0}}, {{'ns_1@10.242.238.88',339},{0,0}}, {{'ns_1@10.242.238.88',340},{0,0}}, {{'ns_1@10.242.238.88',340},{0,0}}, {{'ns_1@10.242.238.88',341},{0,0}}, {{'ns_1@10.242.238.88',341},{0,0}}, {{'ns_1@10.242.238.88',342},{0,0}}, {{'ns_1@10.242.238.88',343},{0,0}}, {{'ns_1@10.242.238.88',344},{0,0}}, {{'ns_1@10.242.238.88',345},{0,0}}, {{'ns_1@10.242.238.88',346},{0,0}}, {{'ns_1@10.242.238.88',347},{0,0}}, {{'ns_1@10.242.238.88',348},{0,0}}, {{'ns_1@10.242.238.88',349},{0,0}}, {{'ns_1@10.242.238.88',350},{0,0}}, {{'ns_1@10.242.238.88',351},{0,0}}, {{'ns_1@10.242.238.88',352},{0,0}}, {{'ns_1@10.242.238.88',353},{0,0}}, {{'ns_1@10.242.238.88',354},{0,0}}, {{'ns_1@10.242.238.88',355},{0,0}}, {{'ns_1@10.242.238.88',356},{0,0}}, {{'ns_1@10.242.238.88',357},{0,0}}, {{'ns_1@10.242.238.88',358},{0,0}}, {{'ns_1@10.242.238.88',359},{0,0}}, {{'ns_1@10.242.238.88',360},{0,0}}, {{'ns_1@10.242.238.88',361},{0,0}}, {{'ns_1@10.242.238.88',362},{0,0}}, {{'ns_1@10.242.238.88',363},{0,0}}, {{'ns_1@10.242.238.88',364},{0,0}}, {{'ns_1@10.242.238.88',365},{0,0}}, {{'ns_1@10.242.238.88',366},{0,0}}, {{'ns_1@10.242.238.88',367},{0,0}}, {{'ns_1@10.242.238.88',368},{0,0}}, {{'ns_1@10.242.238.88',369},{0,0}}, {{'ns_1@10.242.238.88',370},{0,0}}, {{'ns_1@10.242.238.88',371},{0,0}}, {{'ns_1@10.242.238.88',372},{0,0}}, {{'ns_1@10.242.238.88',373},{0,0}}, {{'ns_1@10.242.238.88',374},{0,0}}, {{'ns_1@10.242.238.88',375},{0,0}}, {{'ns_1@10.242.238.88',376},{0,0}}, {{'ns_1@10.242.238.88',377},{0,0}}, {{'ns_1@10.242.238.88',378},{0,0}}, {{'ns_1@10.242.238.88',379},{0,0}}, {{'ns_1@10.242.238.88',380},{0,0}}, {{'ns_1@10.242.238.88',381},{0,0}}, {{'ns_1@10.242.238.88',382},{0,0}}, {{'ns_1@10.242.238.88',383},{0,0}}, {{'ns_1@10.242.238.88',384},{0,0}}, {{'ns_1@10.242.238.88',385},{0,0}}, {{'ns_1@10.242.238.88',386},{0,0}}, {{'ns_1@10.242.238.88',387},{0,0}}, {{'ns_1@10.242.238.88',388},{0,0}}, {{'ns_1@10.242.238.88',389},{0,0}}, {{'ns_1@10.242.238.88',390},{0,0}}, {{'ns_1@10.242.238.88',391},{0,0}}, {{'ns_1@10.242.238.88',392},{0,0}}, {{'ns_1@10.242.238.88',393},{0,0}}, {{'ns_1@10.242.238.88',394},{0,0}}, {{'ns_1@10.242.238.88',395},{0,0}}, {{'ns_1@10.242.238.88',396},{0,0}}, {{'ns_1@10.242.238.88',397},{0,0}}, {{'ns_1@10.242.238.88',398},{0,0}}, {{'ns_1@10.242.238.88',399},{0,0}}, {{'ns_1@10.242.238.88',400},{0,0}}, {{'ns_1@10.242.238.88',401},{0,0}}, {{'ns_1@10.242.238.88',402},{0,0}}, {{'ns_1@10.242.238.88',403},{0,0}}, {{'ns_1@10.242.238.88',404},{0,0}}, {{'ns_1@10.242.238.88',405},{0,0}}, {{'ns_1@10.242.238.88',406},{0,0}}, {{'ns_1@10.242.238.88',407},{0,0}}, {{'ns_1@10.242.238.88',408},{0,0}}, 
{{'ns_1@10.242.238.88',409},{0,0}}, {{'ns_1@10.242.238.88',410},{0,0}}, {{'ns_1@10.242.238.88',411},{0,0}}, {{'ns_1@10.242.238.88',412},{0,0}}, {{'ns_1@10.242.238.88',413},{0,0}}, {{'ns_1@10.242.238.88',414},{0,0}}, {{'ns_1@10.242.238.88',415},{0,0}}, {{'ns_1@10.242.238.88',416},{0,0}}, {{'ns_1@10.242.238.88',417},{0,0}}, {{'ns_1@10.242.238.88',418},{0,0}}, {{'ns_1@10.242.238.88',419},{0,0}}, {{'ns_1@10.242.238.88',420},{0,0}}, {{'ns_1@10.242.238.88',421},{0,0}}, {{'ns_1@10.242.238.88',422},{0,0}}, {{'ns_1@10.242.238.88',423},{0,0}}, {{'ns_1@10.242.238.88',424},{0,0}}, {{'ns_1@10.242.238.88',425},{0,0}}, {{'ns_1@10.242.238.88',426},{0,0}}, {{'ns_1@10.242.238.88',427},{0,0}}, {{'ns_1@10.242.238.88',428},{0,0}}, {{'ns_1@10.242.238.88',429},{0,0}}, {{'ns_1@10.242.238.88',430},{0,0}}, {{'ns_1@10.242.238.88',431},{0,0}}, {{'ns_1@10.242.238.88',432},{0,0}}, {{'ns_1@10.242.238.88',433},{0,0}}, {{'ns_1@10.242.238.88',434},{0,0}}, {{'ns_1@10.242.238.88',435},{0,0}}, {{'ns_1@10.242.238.88',436},{0,0}}, {{'ns_1@10.242.238.88',437},{0,0}}, {{'ns_1@10.242.238.88',438},{0,0}}, {{'ns_1@10.242.238.88',439},{0,0}}, {{'ns_1@10.242.238.88',440},{0,0}}, {{'ns_1@10.242.238.88',441},{0,0}}, {{'ns_1@10.242.238.88',442},{0,0}}, {{'ns_1@10.242.238.88',443},{0,0}}, {{'ns_1@10.242.238.88',444},{0,0}}, {{'ns_1@10.242.238.88',445},{0,0}}, {{'ns_1@10.242.238.88',446},{0,0}}, {{'ns_1@10.242.238.88',447},{0,0}}, {{'ns_1@10.242.238.88',448},{0,0}}, {{'ns_1@10.242.238.88',449},{0,0}}, {{'ns_1@10.242.238.88',450},{0,0}}, {{'ns_1@10.242.238.88',451},{0,0}}, {{'ns_1@10.242.238.88',452},{0,0}}, {{'ns_1@10.242.238.88',453},{0,0}}, {{'ns_1@10.242.238.88',454},{0,0}}, {{'ns_1@10.242.238.88',455},{0,0}}, {{'ns_1@10.242.238.88',456},{0,0}}, {{'ns_1@10.242.238.88',457},{0,0}}, {{'ns_1@10.242.238.88',458},{0,0}}, {{'ns_1@10.242.238.88',459},{0,0}}, {{'ns_1@10.242.238.88',460},{0,0}}, {{'ns_1@10.242.238.88',461},{0,0}}, {{'ns_1@10.242.238.88',462},{0,0}}, {{'ns_1@10.242.238.88',463},{0,0}}, {{'ns_1@10.242.238.88',464},{0,0}}, {{'ns_1@10.242.238.88',465},{0,0}}, {{'ns_1@10.242.238.88',466},{0,0}}, {{'ns_1@10.242.238.88',467},{0,0}}, {{'ns_1@10.242.238.88',468},{0,0}}, {{'ns_1@10.242.238.88',469},{0,0}}, {{'ns_1@10.242.238.88',470},{0,0}}, {{'ns_1@10.242.238.88',471},{0,0}}, {{'ns_1@10.242.238.88',472},{0,0}}, {{'ns_1@10.242.238.88',473},{0,0}}, {{'ns_1@10.242.238.88',474},{0,0}}, {{'ns_1@10.242.238.88',475},{0,0}}, {{'ns_1@10.242.238.88',476},{0,0}}, {{'ns_1@10.242.238.88',477},{0,0}}, {{'ns_1@10.242.238.88',478},{0,0}}, {{'ns_1@10.242.238.88',479},{0,0}}, {{'ns_1@10.242.238.88',480},{0,0}}, {{'ns_1@10.242.238.88',481},{0,0}}, {{'ns_1@10.242.238.88',482},{0,0}}, {{'ns_1@10.242.238.88',483},{0,0}}, {{'ns_1@10.242.238.88',484},{0,0}}, {{'ns_1@10.242.238.88',485},{0,0}}, {{'ns_1@10.242.238.88',486},{0,0}}, {{'ns_1@10.242.238.88',487},{0,0}}, {{'ns_1@10.242.238.88',488},{0,0}}, {{'ns_1@10.242.238.88',489},{0,0}}, {{'ns_1@10.242.238.88',490},{0,0}}, {{'ns_1@10.242.238.88',491},{0,0}}, {{'ns_1@10.242.238.88',492},{0,0}}, {{'ns_1@10.242.238.88',493},{0,0}}, {{'ns_1@10.242.238.88',494},{0,0}}, {{'ns_1@10.242.238.88',495},{0,0}}, {{'ns_1@10.242.238.88',496},{0,0}}, {{'ns_1@10.242.238.88',497},{0,0}}, {{'ns_1@10.242.238.88',498},{0,0}}, {{'ns_1@10.242.238.88',499},{0,0}}, {{'ns_1@10.242.238.88',500},{0,0}}, {{'ns_1@10.242.238.88',501},{0,0}}, {{'ns_1@10.242.238.88',502},{0,0}}, {{'ns_1@10.242.238.88',503},{0,0}}, {{'ns_1@10.242.238.88',504},{0,0}}, {{'ns_1@10.242.238.88',505},{0,0}}, {{'ns_1@10.242.238.88',506},{0,0}}, 
{{'ns_1@10.242.238.88',507},{0,0}}, {{'ns_1@10.242.238.88',508},{0,0}}, {{'ns_1@10.242.238.88',509},{0,0}}, {{'ns_1@10.242.238.88',510},{0,0}}, {{'ns_1@10.242.238.88',511},{0,0}}, {{'ns_1@10.242.238.88',512},{0,0}}, {{'ns_1@10.242.238.88',512},{0,0}}, {{'ns_1@10.242.238.88',513},{0,0}}, {{'ns_1@10.242.238.88',513},{0,0}}, {{'ns_1@10.242.238.88',514},{0,0}}, {{'ns_1@10.242.238.88',514},{0,0}}, {{'ns_1@10.242.238.88',515},{0,0}}, {{'ns_1@10.242.238.88',515},{0,0}}, {{'ns_1@10.242.238.88',516},{0,0}}, {{'ns_1@10.242.238.88',516},{0,0}}, {{'ns_1@10.242.238.88',517},{0,0}}, {{'ns_1@10.242.238.88',517},{0,0}}, {{'ns_1@10.242.238.88',518},{0,0}}, {{'ns_1@10.242.238.88',518},{0,0}}, {{'ns_1@10.242.238.88',519},{0,0}}, {{'ns_1@10.242.238.88',519},{0,0}}, {{'ns_1@10.242.238.88',520},{0,0}}, {{'ns_1@10.242.238.88',520},{0,0}}, {{'ns_1@10.242.238.88',521},{0,0}}, {{'ns_1@10.242.238.88',521},{0,0}}, {{'ns_1@10.242.238.88',522},{0,0}}, {{'ns_1@10.242.238.88',522},{0,0}}, {{'ns_1@10.242.238.88',523},{0,0}}, {{'ns_1@10.242.238.88',523},{0,0}}, {{'ns_1@10.242.238.88',524},{0,0}}, {{'ns_1@10.242.238.88',524},{0,0}}, {{'ns_1@10.242.238.88',525},{0,0}}, {{'ns_1@10.242.238.88',525},{0,0}}, {{'ns_1@10.242.238.88',526},{0,0}}, {{'ns_1@10.242.238.88',526},{0,0}}, {{'ns_1@10.242.238.88',527},{0,0}}, {{'ns_1@10.242.238.88',527},{0,0}}, {{'ns_1@10.242.238.88',528},{0,0}}, {{'ns_1@10.242.238.88',528},{0,0}}, {{'ns_1@10.242.238.88',529},{0,0}}, {{'ns_1@10.242.238.88',529},{0,0}}, {{'ns_1@10.242.238.88',530},{0,0}}, {{'ns_1@10.242.238.88',530},{0,0}}, {{'ns_1@10.242.238.88',531},{0,0}}, {{'ns_1@10.242.238.88',531},{0,0}}, {{'ns_1@10.242.238.88',532},{0,0}}, {{'ns_1@10.242.238.88',532},{0,0}}, {{'ns_1@10.242.238.88',533},{0,0}}, {{'ns_1@10.242.238.88',533},{0,0}}, {{'ns_1@10.242.238.88',534},{0,0}}, {{'ns_1@10.242.238.88',534},{0,0}}, {{'ns_1@10.242.238.88',535},{0,0}}, {{'ns_1@10.242.238.88',535},{0,0}}, {{'ns_1@10.242.238.88',536},{0,0}}, {{'ns_1@10.242.238.88',536},{0,0}}, {{'ns_1@10.242.238.88',537},{0,0}}, {{'ns_1@10.242.238.88',537},{0,0}}, {{'ns_1@10.242.238.88',538},{0,0}}, {{'ns_1@10.242.238.88',538},{0,0}}, {{'ns_1@10.242.238.88',539},{0,0}}, {{'ns_1@10.242.238.88',539},{0,0}}, {{'ns_1@10.242.238.88',540},{0,0}}, {{'ns_1@10.242.238.88',540},{0,0}}, {{'ns_1@10.242.238.88',541},{0,0}}, {{'ns_1@10.242.238.88',541},{0,0}}, {{'ns_1@10.242.238.88',542},{0,0}}, {{'ns_1@10.242.238.88',542},{0,0}}, {{'ns_1@10.242.238.88',543},{0,0}}, {{'ns_1@10.242.238.88',543},{0,0}}, {{'ns_1@10.242.238.88',544},{0,0}}, {{'ns_1@10.242.238.88',544},{0,0}}, {{'ns_1@10.242.238.88',545},{0,0}}, {{'ns_1@10.242.238.88',545},{0,0}}, {{'ns_1@10.242.238.88',546},{0,0}}, {{'ns_1@10.242.238.88',546},{0,0}}, {{'ns_1@10.242.238.88',547},{0,0}}, {{'ns_1@10.242.238.88',547},{0,0}}, {{'ns_1@10.242.238.88',548},{0,0}}, {{'ns_1@10.242.238.88',548},{0,0}}, {{'ns_1@10.242.238.88',549},{0,0}}, {{'ns_1@10.242.238.88',549},{0,0}}, {{'ns_1@10.242.238.88',550},{0,0}}, {{'ns_1@10.242.238.88',550},{0,0}}, {{'ns_1@10.242.238.88',551},{0,0}}, {{'ns_1@10.242.238.88',551},{0,0}}, {{'ns_1@10.242.238.88',552},{0,0}}, {{'ns_1@10.242.238.88',552},{0,0}}, {{'ns_1@10.242.238.88',553},{0,0}}, {{'ns_1@10.242.238.88',553},{0,0}}, {{'ns_1@10.242.238.88',554},{0,0}}, {{'ns_1@10.242.238.88',554},{0,0}}, {{'ns_1@10.242.238.88',555},{0,0}}, {{'ns_1@10.242.238.88',555},{0,0}}, {{'ns_1@10.242.238.88',556},{0,0}}, {{'ns_1@10.242.238.88',556},{0,0}}, {{'ns_1@10.242.238.88',557},{0,0}}, {{'ns_1@10.242.238.88',557},{0,0}}, {{'ns_1@10.242.238.88',558},{0,0}}, 
{{'ns_1@10.242.238.88',558},{0,0}}, {{'ns_1@10.242.238.88',559},{0,0}}, {{'ns_1@10.242.238.88',559},{0,0}}, {{'ns_1@10.242.238.88',560},{0,0}}, {{'ns_1@10.242.238.88',560},{0,0}}, {{'ns_1@10.242.238.88',561},{0,0}}, {{'ns_1@10.242.238.88',561},{0,0}}, {{'ns_1@10.242.238.88',562},{0,0}}, {{'ns_1@10.242.238.88',562},{0,0}}, {{'ns_1@10.242.238.88',563},{0,0}}, {{'ns_1@10.242.238.88',563},{0,0}}, {{'ns_1@10.242.238.88',564},{0,0}}, {{'ns_1@10.242.238.88',564},{0,0}}, {{'ns_1@10.242.238.88',565},{0,0}}, {{'ns_1@10.242.238.88',565},{0,0}}, {{'ns_1@10.242.238.88',566},{0,0}}, {{'ns_1@10.242.238.88',566},{0,0}}, {{'ns_1@10.242.238.88',567},{0,0}}, {{'ns_1@10.242.238.88',567},{0,0}}, {{'ns_1@10.242.238.88',568},{0,0}}, {{'ns_1@10.242.238.88',568},{0,0}}, {{'ns_1@10.242.238.88',569},{0,0}}, {{'ns_1@10.242.238.88',569},{0,0}}, {{'ns_1@10.242.238.88',570},{0,0}}, {{'ns_1@10.242.238.88',570},{0,0}}, {{'ns_1@10.242.238.88',571},{0,0}}, {{'ns_1@10.242.238.88',571},{0,0}}, {{'ns_1@10.242.238.88',572},{0,0}}, {{'ns_1@10.242.238.88',572},{0,0}}, {{'ns_1@10.242.238.88',573},{0,0}}, {{'ns_1@10.242.238.88',573},{0,0}}, {{'ns_1@10.242.238.88',574},{0,0}}, {{'ns_1@10.242.238.88',574},{0,0}}, {{'ns_1@10.242.238.88',575},{0,0}}, {{'ns_1@10.242.238.88',575},{0,0}}, {{'ns_1@10.242.238.88',576},{0,0}}, {{'ns_1@10.242.238.88',576},{0,0}}, {{'ns_1@10.242.238.88',577},{0,0}}, {{'ns_1@10.242.238.88',577},{0,0}}, {{'ns_1@10.242.238.88',578},{0,0}}, {{'ns_1@10.242.238.88',578},{0,0}}, {{'ns_1@10.242.238.88',579},{0,0}}, {{'ns_1@10.242.238.88',579},{0,0}}, {{'ns_1@10.242.238.88',580},{0,0}}, {{'ns_1@10.242.238.88',580},{0,0}}, {{'ns_1@10.242.238.88',581},{0,0}}, {{'ns_1@10.242.238.88',581},{0,0}}, {{'ns_1@10.242.238.88',582},{0,0}}, {{'ns_1@10.242.238.88',582},{0,0}}, {{'ns_1@10.242.238.88',583},{0,0}}, {{'ns_1@10.242.238.88',583},{0,0}}, {{'ns_1@10.242.238.88',584},{0,0}}, {{'ns_1@10.242.238.88',584},{0,0}}, {{'ns_1@10.242.238.88',585},{0,0}}, {{'ns_1@10.242.238.88',585},{0,0}}, {{'ns_1@10.242.238.88',586},{0,0}}, {{'ns_1@10.242.238.88',586},{0,0}}, {{'ns_1@10.242.238.88',587},{0,0}}, {{'ns_1@10.242.238.88',587},{0,0}}, {{'ns_1@10.242.238.88',588},{0,0}}, {{'ns_1@10.242.238.88',588},{0,0}}, {{'ns_1@10.242.238.88',589},{0,0}}, {{'ns_1@10.242.238.88',589},{0,0}}, {{'ns_1@10.242.238.88',590},{0,0}}, {{'ns_1@10.242.238.88',590},{0,0}}, {{'ns_1@10.242.238.88',591},{0,0}}, {{'ns_1@10.242.238.88',591},{0,0}}, {{'ns_1@10.242.238.88',592},{0,0}}, {{'ns_1@10.242.238.88',592},{0,0}}, {{'ns_1@10.242.238.88',593},{0,0}}, {{'ns_1@10.242.238.88',593},{0,0}}, {{'ns_1@10.242.238.88',594},{0,0}}, {{'ns_1@10.242.238.88',594},{0,0}}, {{'ns_1@10.242.238.88',595},{0,0}}, {{'ns_1@10.242.238.88',595},{0,0}}, {{'ns_1@10.242.238.88',596},{0,0}}, {{'ns_1@10.242.238.88',596},{0,0}}, {{'ns_1@10.242.238.88',597},{0,0}}, {{'ns_1@10.242.238.88',598},{0,0}}, {{'ns_1@10.242.238.88',599},{0,0}}, {{'ns_1@10.242.238.88',600},{0,0}}, {{'ns_1@10.242.238.88',601},{0,0}}, {{'ns_1@10.242.238.88',602},{0,0}}, {{'ns_1@10.242.238.88',603},{0,0}}, {{'ns_1@10.242.238.88',604},{0,0}}, {{'ns_1@10.242.238.88',605},{0,0}}, {{'ns_1@10.242.238.88',606},{0,0}}, {{'ns_1@10.242.238.88',607},{0,0}}, {{'ns_1@10.242.238.88',608},{0,0}}, {{'ns_1@10.242.238.88',609},{0,0}}, {{'ns_1@10.242.238.88',610},{0,0}}, {{'ns_1@10.242.238.88',611},{0,0}}, {{'ns_1@10.242.238.88',612},{0,0}}, {{'ns_1@10.242.238.88',613},{0,0}}, {{'ns_1@10.242.238.88',614},{0,0}}, {{'ns_1@10.242.238.88',615},{0,0}}, {{'ns_1@10.242.238.88',616},{0,0}}, {{'ns_1@10.242.238.88',617},{0,0}}, 
{{'ns_1@10.242.238.88',618},{0,0}}, {{'ns_1@10.242.238.88',619},{0,0}}, {{'ns_1@10.242.238.88',620},{0,0}}, {{'ns_1@10.242.238.88',621},{0,0}}, {{'ns_1@10.242.238.88',622},{0,0}}, {{'ns_1@10.242.238.88',623},{0,0}}, {{'ns_1@10.242.238.88',624},{0,0}}, {{'ns_1@10.242.238.88',625},{0,0}}, {{'ns_1@10.242.238.88',626},{0,0}}, {{'ns_1@10.242.238.88',627},{0,0}}, {{'ns_1@10.242.238.88',628},{0,0}}, {{'ns_1@10.242.238.88',629},{0,0}}, {{'ns_1@10.242.238.88',630},{0,0}}, {{'ns_1@10.242.238.88',631},{0,0}}, {{'ns_1@10.242.238.88',632},{0,0}}, {{'ns_1@10.242.238.88',633},{0,0}}, {{'ns_1@10.242.238.88',634},{0,0}}, {{'ns_1@10.242.238.88',635},{0,0}}, {{'ns_1@10.242.238.88',636},{0,0}}, {{'ns_1@10.242.238.88',637},{0,0}}, {{'ns_1@10.242.238.88',638},{0,0}}, {{'ns_1@10.242.238.88',639},{0,0}}, {{'ns_1@10.242.238.88',640},{0,0}}, {{'ns_1@10.242.238.88',641},{0,0}}, {{'ns_1@10.242.238.88',642},{0,0}}, {{'ns_1@10.242.238.88',643},{0,0}}, {{'ns_1@10.242.238.88',644},{0,0}}, {{'ns_1@10.242.238.88',645},{0,0}}, {{'ns_1@10.242.238.88',646},{0,0}}, {{'ns_1@10.242.238.88',647},{0,0}}, {{'ns_1@10.242.238.88',648},{0,0}}, {{'ns_1@10.242.238.88',649},{0,0}}, {{'ns_1@10.242.238.88',650},{0,0}}, {{'ns_1@10.242.238.88',651},{0,0}}, {{'ns_1@10.242.238.88',652},{0,0}}, {{'ns_1@10.242.238.88',653},{0,0}}, {{'ns_1@10.242.238.88',654},{0,0}}, {{'ns_1@10.242.238.88',655},{0,0}}, {{'ns_1@10.242.238.88',656},{0,0}}, {{'ns_1@10.242.238.88',657},{0,0}}, {{'ns_1@10.242.238.88',658},{0,0}}, {{'ns_1@10.242.238.88',659},{0,0}}, {{'ns_1@10.242.238.88',660},{0,0}}, {{'ns_1@10.242.238.88',661},{0,0}}, {{'ns_1@10.242.238.88',662},{0,0}}, {{'ns_1@10.242.238.88',663},{0,0}}, {{'ns_1@10.242.238.88',664},{0,0}}, {{'ns_1@10.242.238.88',665},{0,0}}, {{'ns_1@10.242.238.88',666},{0,0}}, {{'ns_1@10.242.238.88',667},{0,0}}, {{'ns_1@10.242.238.88',668},{0,0}}, {{'ns_1@10.242.238.88',669},{0,0}}, {{'ns_1@10.242.238.88',670},{0,0}}, {{'ns_1@10.242.238.88',671},{0,0}}, {{'ns_1@10.242.238.88',672},{0,0}}, {{'ns_1@10.242.238.88',673},{0,0}}, {{'ns_1@10.242.238.88',674},{0,0}}, {{'ns_1@10.242.238.88',675},{0,0}}, {{'ns_1@10.242.238.88',676},{0,0}}, {{'ns_1@10.242.238.88',677},{0,0}}, {{'ns_1@10.242.238.88',678},{0,0}}, {{'ns_1@10.242.238.88',679},{0,0}}, {{'ns_1@10.242.238.88',680},{0,0}}, {{'ns_1@10.242.238.88',681},{0,0}}, {{'ns_1@10.242.238.88',682},{0,0}}, {{'ns_1@10.242.238.88',683},{0,0}}, {{'ns_1@10.242.238.88',684},{0,0}}, {{'ns_1@10.242.238.88',685},{0,0}}, {{'ns_1@10.242.238.88',686},{0,0}}, {{'ns_1@10.242.238.88',687},{0,0}}, {{'ns_1@10.242.238.88',688},{0,0}}, {{'ns_1@10.242.238.88',689},{0,0}}, {{'ns_1@10.242.238.88',690},{0,0}}, {{'ns_1@10.242.238.88',691},{0,0}}, {{'ns_1@10.242.238.88',692},{0,0}}, {{'ns_1@10.242.238.88',693},{0,0}}, {{'ns_1@10.242.238.88',694},{0,0}}, {{'ns_1@10.242.238.88',695},{0,0}}, {{'ns_1@10.242.238.88',696},{0,0}}, {{'ns_1@10.242.238.88',697},{0,0}}, {{'ns_1@10.242.238.88',698},{0,0}}, {{'ns_1@10.242.238.88',699},{0,0}}, {{'ns_1@10.242.238.88',700},{0,0}}, {{'ns_1@10.242.238.88',701},{0,0}}, {{'ns_1@10.242.238.88',702},{0,0}}, {{'ns_1@10.242.238.88',703},{0,0}}, {{'ns_1@10.242.238.88',704},{0,0}}, {{'ns_1@10.242.238.88',705},{0,0}}, {{'ns_1@10.242.238.88',706},{0,0}}, {{'ns_1@10.242.238.88',707},{0,0}}, {{'ns_1@10.242.238.88',708},{0,0}}, {{'ns_1@10.242.238.88',709},{0,0}}, {{'ns_1@10.242.238.88',710},{0,0}}, {{'ns_1@10.242.238.88',711},{0,0}}, {{'ns_1@10.242.238.88',712},{0,0}}, {{'ns_1@10.242.238.88',713},{0,0}}, {{'ns_1@10.242.238.88',714},{0,0}}, {{'ns_1@10.242.238.88',715},{0,0}}, 
{{'ns_1@10.242.238.88',716},{0,0}}, {{'ns_1@10.242.238.88',717},{0,0}}, {{'ns_1@10.242.238.88',718},{0,0}}, {{'ns_1@10.242.238.88',719},{0,0}}, {{'ns_1@10.242.238.88',720},{0,0}}, {{'ns_1@10.242.238.88',721},{0,0}}, {{'ns_1@10.242.238.88',722},{0,0}}, {{'ns_1@10.242.238.88',723},{0,0}}, {{'ns_1@10.242.238.88',724},{0,0}}, {{'ns_1@10.242.238.88',725},{0,0}}, {{'ns_1@10.242.238.88',726},{0,0}}, {{'ns_1@10.242.238.88',727},{0,0}}, {{'ns_1@10.242.238.88',728},{0,0}}, {{'ns_1@10.242.238.88',729},{0,0}}, {{'ns_1@10.242.238.88',730},{0,0}}, {{'ns_1@10.242.238.88',731},{0,0}}, {{'ns_1@10.242.238.88',732},{0,0}}, {{'ns_1@10.242.238.88',733},{0,0}}, {{'ns_1@10.242.238.88',734},{0,0}}, {{'ns_1@10.242.238.88',735},{0,0}}, {{'ns_1@10.242.238.88',736},{0,0}}, {{'ns_1@10.242.238.88',737},{0,0}}, {{'ns_1@10.242.238.88',738},{0,0}}, {{'ns_1@10.242.238.88',739},{0,0}}, {{'ns_1@10.242.238.88',740},{0,0}}, {{'ns_1@10.242.238.88',741},{0,0}}, {{'ns_1@10.242.238.88',742},{0,0}}, {{'ns_1@10.242.238.88',743},{0,0}}, {{'ns_1@10.242.238.88',744},{0,0}}, {{'ns_1@10.242.238.88',745},{0,0}}, {{'ns_1@10.242.238.88',746},{0,0}}, {{'ns_1@10.242.238.88',747},{0,0}}, {{'ns_1@10.242.238.88',748},{0,0}}, {{'ns_1@10.242.238.88',749},{0,0}}, {{'ns_1@10.242.238.88',750},{0,0}}, {{'ns_1@10.242.238.88',751},{0,0}}, {{'ns_1@10.242.238.88',752},{0,0}}, {{'ns_1@10.242.238.88',753},{0,0}}, {{'ns_1@10.242.238.88',754},{0,0}}, {{'ns_1@10.242.238.88',755},{0,0}}, {{'ns_1@10.242.238.88',756},{0,0}}, {{'ns_1@10.242.238.88',757},{0,0}}, {{'ns_1@10.242.238.88',758},{0,0}}, {{'ns_1@10.242.238.88',759},{0,0}}, {{'ns_1@10.242.238.88',760},{0,0}}, {{'ns_1@10.242.238.88',761},{0,0}}, {{'ns_1@10.242.238.88',762},{0,0}}, {{'ns_1@10.242.238.88',763},{0,0}}, {{'ns_1@10.242.238.88',764},{0,0}}, {{'ns_1@10.242.238.88',765},{0,0}}, {{'ns_1@10.242.238.88',766},{0,0}}, {{'ns_1@10.242.238.88',767},{0,0}}, {{'ns_1@10.242.238.88',768},{0,0}}, {{'ns_1@10.242.238.88',768},{0,0}}, {{'ns_1@10.242.238.88',769},{0,0}}, {{'ns_1@10.242.238.88',769},{0,0}}, {{'ns_1@10.242.238.88',770},{0,0}}, {{'ns_1@10.242.238.88',770},{0,0}}, {{'ns_1@10.242.238.88',771},{0,0}}, {{'ns_1@10.242.238.88',771},{0,0}}, {{'ns_1@10.242.238.88',772},{0,0}}, {{'ns_1@10.242.238.88',772},{0,0}}, {{'ns_1@10.242.238.88',773},{0,0}}, {{'ns_1@10.242.238.88',773},{0,0}}, {{'ns_1@10.242.238.88',774},{0,0}}, {{'ns_1@10.242.238.88',774},{0,0}}, {{'ns_1@10.242.238.88',775},{0,0}}, {{'ns_1@10.242.238.88',775},{0,0}}, {{'ns_1@10.242.238.88',776},{0,0}}, {{'ns_1@10.242.238.88',776},{0,0}}, {{'ns_1@10.242.238.88',777},{0,0}}, {{'ns_1@10.242.238.88',777},{0,0}}, {{'ns_1@10.242.238.88',778},{0,0}}, {{'ns_1@10.242.238.88',778},{0,0}}, {{'ns_1@10.242.238.88',779},{0,0}}, {{'ns_1@10.242.238.88',779},{0,0}}, {{'ns_1@10.242.238.88',780},{0,0}}, {{'ns_1@10.242.238.88',780},{0,0}}, {{'ns_1@10.242.238.88',781},{0,0}}, {{'ns_1@10.242.238.88',781},{0,0}}, {{'ns_1@10.242.238.88',782},{0,0}}, {{'ns_1@10.242.238.88',782},{0,0}}, {{'ns_1@10.242.238.88',783},{0,0}}, {{'ns_1@10.242.238.88',783},{0,0}}, {{'ns_1@10.242.238.88',784},{0,0}}, {{'ns_1@10.242.238.88',784},{0,0}}, {{'ns_1@10.242.238.88',785},{0,0}}, {{'ns_1@10.242.238.88',785},{0,0}}, {{'ns_1@10.242.238.88',786},{0,0}}, {{'ns_1@10.242.238.88',786},{0,0}}, {{'ns_1@10.242.238.88',787},{0,0}}, {{'ns_1@10.242.238.88',787},{0,0}}, {{'ns_1@10.242.238.88',788},{0,0}}, {{'ns_1@10.242.238.88',788},{0,0}}, {{'ns_1@10.242.238.88',789},{0,0}}, {{'ns_1@10.242.238.88',789},{0,0}}, {{'ns_1@10.242.238.88',790},{0,0}}, {{'ns_1@10.242.238.88',790},{0,0}}, 
{{'ns_1@10.242.238.88',791},{0,0}}, {{'ns_1@10.242.238.88',791},{0,0}}, {{'ns_1@10.242.238.88',792},{0,0}}, {{'ns_1@10.242.238.88',792},{0,0}}, {{'ns_1@10.242.238.88',793},{0,0}}, {{'ns_1@10.242.238.88',793},{0,0}}, {{'ns_1@10.242.238.88',794},{0,0}}, {{'ns_1@10.242.238.88',794},{0,0}}, {{'ns_1@10.242.238.88',795},{0,0}}, {{'ns_1@10.242.238.88',795},{0,0}}, {{'ns_1@10.242.238.88',796},{0,0}}, {{'ns_1@10.242.238.88',796},{0,0}}, {{'ns_1@10.242.238.88',797},{0,0}}, {{'ns_1@10.242.238.88',797},{0,0}}, {{'ns_1@10.242.238.88',798},{0,0}}, {{'ns_1@10.242.238.88',798},{0,0}}, {{'ns_1@10.242.238.88',799},{0,0}}, {{'ns_1@10.242.238.88',799},{0,0}}, {{'ns_1@10.242.238.88',800},{0,0}}, {{'ns_1@10.242.238.88',800},{0,0}}, {{'ns_1@10.242.238.88',801},{0,0}}, {{'ns_1@10.242.238.88',801},{0,0}}, {{'ns_1@10.242.238.88',802},{0,0}}, {{'ns_1@10.242.238.88',802},{0,0}}, {{'ns_1@10.242.238.88',803},{0,0}}, {{'ns_1@10.242.238.88',803},{0,0}}, {{'ns_1@10.242.238.88',804},{0,0}}, {{'ns_1@10.242.238.88',804},{0,0}}, {{'ns_1@10.242.238.88',805},{0,0}}, {{'ns_1@10.242.238.88',805},{0,0}}, {{'ns_1@10.242.238.88',806},{0,0}}, {{'ns_1@10.242.238.88',806},{0,0}}, {{'ns_1@10.242.238.88',807},{0,0}}, {{'ns_1@10.242.238.88',807},{0,0}}, {{'ns_1@10.242.238.88',808},{0,0}}, {{'ns_1@10.242.238.88',808},{0,0}}, {{'ns_1@10.242.238.88',809},{0,0}}, {{'ns_1@10.242.238.88',809},{0,0}}, {{'ns_1@10.242.238.88',810},{0,0}}, {{'ns_1@10.242.238.88',810},{0,0}}, {{'ns_1@10.242.238.88',811},{0,0}}, {{'ns_1@10.242.238.88',811},{0,0}}, {{'ns_1@10.242.238.88',812},{0,0}}, {{'ns_1@10.242.238.88',812},{0,0}}, {{'ns_1@10.242.238.88',813},{0,0}}, {{'ns_1@10.242.238.88',813},{0,0}}, {{'ns_1@10.242.238.88',814},{0,0}}, {{'ns_1@10.242.238.88',814},{0,0}}, {{'ns_1@10.242.238.88',815},{0,0}}, {{'ns_1@10.242.238.88',815},{0,0}}, {{'ns_1@10.242.238.88',816},{0,0}}, {{'ns_1@10.242.238.88',816},{0,0}}, {{'ns_1@10.242.238.88',817},{0,0}}, {{'ns_1@10.242.238.88',817},{0,0}}, {{'ns_1@10.242.238.88',818},{0,0}}, {{'ns_1@10.242.238.88',818},{0,0}}, {{'ns_1@10.242.238.88',819},{0,0}}, {{'ns_1@10.242.238.88',819},{0,0}}, {{'ns_1@10.242.238.88',820},{0,0}}, {{'ns_1@10.242.238.88',820},{0,0}}, {{'ns_1@10.242.238.88',821},{0,0}}, {{'ns_1@10.242.238.88',821},{0,0}}, {{'ns_1@10.242.238.88',822},{0,0}}, {{'ns_1@10.242.238.88',822},{0,0}}, {{'ns_1@10.242.238.88',823},{0,0}}, {{'ns_1@10.242.238.88',823},{0,0}}, {{'ns_1@10.242.238.88',824},{0,0}}, {{'ns_1@10.242.238.88',824},{0,0}}, {{'ns_1@10.242.238.88',825},{0,0}}, {{'ns_1@10.242.238.88',825},{0,0}}, {{'ns_1@10.242.238.88',826},{0,0}}, {{'ns_1@10.242.238.88',826},{0,0}}, {{'ns_1@10.242.238.88',827},{0,0}}, {{'ns_1@10.242.238.88',827},{0,0}}, {{'ns_1@10.242.238.88',828},{0,0}}, {{'ns_1@10.242.238.88',828},{0,0}}, {{'ns_1@10.242.238.88',829},{0,0}}, {{'ns_1@10.242.238.88',829},{0,0}}, {{'ns_1@10.242.238.88',830},{0,0}}, {{'ns_1@10.242.238.88',830},{0,0}}, {{'ns_1@10.242.238.88',831},{0,0}}, {{'ns_1@10.242.238.88',831},{0,0}}, {{'ns_1@10.242.238.88',832},{0,0}}, {{'ns_1@10.242.238.88',832},{0,0}}, {{'ns_1@10.242.238.88',833},{0,0}}, {{'ns_1@10.242.238.88',833},{0,0}}, {{'ns_1@10.242.238.88',834},{0,0}}, {{'ns_1@10.242.238.88',834},{0,0}}, {{'ns_1@10.242.238.88',835},{0,0}}, {{'ns_1@10.242.238.88',835},{0,0}}, {{'ns_1@10.242.238.88',836},{0,0}}, {{'ns_1@10.242.238.88',836},{0,0}}, {{'ns_1@10.242.238.88',837},{0,0}}, {{'ns_1@10.242.238.88',837},{0,0}}, {{'ns_1@10.242.238.88',838},{0,0}}, {{'ns_1@10.242.238.88',838},{0,0}}, {{'ns_1@10.242.238.88',839},{0,0}}, {{'ns_1@10.242.238.88',839},{0,0}}, 
{{'ns_1@10.242.238.88',840},{0,0}}, {{'ns_1@10.242.238.88',840},{0,0}}, {{'ns_1@10.242.238.88',841},{0,0}}, {{'ns_1@10.242.238.88',841},{0,0}}, {{'ns_1@10.242.238.88',842},{0,0}}, {{'ns_1@10.242.238.88',842},{0,0}}, {{'ns_1@10.242.238.88',843},{0,0}}, {{'ns_1@10.242.238.88',843},{0,0}}, {{'ns_1@10.242.238.88',844},{0,0}}, {{'ns_1@10.242.238.88',844},{0,0}}, {{'ns_1@10.242.238.88',845},{0,0}}, {{'ns_1@10.242.238.88',845},{0,0}}, {{'ns_1@10.242.238.88',846},{0,0}}, {{'ns_1@10.242.238.88',846},{0,0}}, {{'ns_1@10.242.238.88',847},{0,0}}, {{'ns_1@10.242.238.88',847},{0,0}}, {{'ns_1@10.242.238.88',848},{0,0}}, {{'ns_1@10.242.238.88',848},{0,0}}, {{'ns_1@10.242.238.88',849},{0,0}}, {{'ns_1@10.242.238.88',849},{0,0}}, {{'ns_1@10.242.238.88',850},{0,0}}, {{'ns_1@10.242.238.88',850},{0,0}}, {{'ns_1@10.242.238.88',851},{0,0}}, {{'ns_1@10.242.238.88',851},{0,0}}, {{'ns_1@10.242.238.88',852},{0,0}}, {{'ns_1@10.242.238.88',852},{0,0}}, {{'ns_1@10.242.238.88',853},{0,0}}, {{'ns_1@10.242.238.88',854},{0,0}}, {{'ns_1@10.242.238.88',855},{0,0}}, {{'ns_1@10.242.238.88',856},{0,0}}, {{'ns_1@10.242.238.88',857},{0,0}}, {{'ns_1@10.242.238.88',858},{0,0}}, {{'ns_1@10.242.238.88',859},{0,0}}, {{'ns_1@10.242.238.88',860},{0,0}}, {{'ns_1@10.242.238.88',861},{0,0}}, {{'ns_1@10.242.238.88',862},{0,0}}, {{'ns_1@10.242.238.88',863},{0,0}}, {{'ns_1@10.242.238.88',864},{0,0}}, {{'ns_1@10.242.238.88',865},{0,0}}, {{'ns_1@10.242.238.88',866},{0,0}}, {{'ns_1@10.242.238.88',867},{0,0}}, {{'ns_1@10.242.238.88',868},{0,0}}, {{'ns_1@10.242.238.88',869},{0,0}}, {{'ns_1@10.242.238.88',870},{0,0}}, {{'ns_1@10.242.238.88',871},{0,0}}, {{'ns_1@10.242.238.88',872},{0,0}}, {{'ns_1@10.242.238.88',873},{0,0}}, {{'ns_1@10.242.238.88',874},{0,0}}, {{'ns_1@10.242.238.88',875},{0,0}}, {{'ns_1@10.242.238.88',876},{0,0}}, {{'ns_1@10.242.238.88',877},{0,0}}, {{'ns_1@10.242.238.88',878},{0,0}}, {{'ns_1@10.242.238.88',879},{0,0}}, {{'ns_1@10.242.238.88',880},{0,0}}, {{'ns_1@10.242.238.88',881},{0,0}}, {{'ns_1@10.242.238.88',882},{0,0}}, {{'ns_1@10.242.238.88',883},{0,0}}, {{'ns_1@10.242.238.88',884},{0,0}}, {{'ns_1@10.242.238.88',885},{0,0}}, {{'ns_1@10.242.238.88',886},{0,0}}, {{'ns_1@10.242.238.88',887},{0,0}}, {{'ns_1@10.242.238.88',888},{0,0}}, {{'ns_1@10.242.238.88',889},{0,0}}, {{'ns_1@10.242.238.88',890},{0,0}}, {{'ns_1@10.242.238.88',891},{0,0}}, {{'ns_1@10.242.238.88',892},{0,0}}, {{'ns_1@10.242.238.88',893},{0,0}}, {{'ns_1@10.242.238.88',894},{0,0}}, {{'ns_1@10.242.238.88',895},{0,0}}, {{'ns_1@10.242.238.88',896},{0,0}}, {{'ns_1@10.242.238.88',897},{0,0}}, {{'ns_1@10.242.238.88',898},{0,0}}, {{'ns_1@10.242.238.88',899},{0,0}}, {{'ns_1@10.242.238.88',900},{0,0}}, {{'ns_1@10.242.238.88',901},{0,0}}, {{'ns_1@10.242.238.88',902},{0,0}}, {{'ns_1@10.242.238.88',903},{0,0}}, {{'ns_1@10.242.238.88',904},{0,0}}, {{'ns_1@10.242.238.88',905},{0,0}}, {{'ns_1@10.242.238.88',906},{0,0}}, {{'ns_1@10.242.238.88',907},{0,0}}, {{'ns_1@10.242.238.88',908},{0,0}}, {{'ns_1@10.242.238.88',909},{0,0}}, {{'ns_1@10.242.238.88',910},{0,0}}, {{'ns_1@10.242.238.88',911},{0,0}}, {{'ns_1@10.242.238.88',912},{0,0}}, {{'ns_1@10.242.238.88',913},{0,0}}, {{'ns_1@10.242.238.88',914},{0,0}}, {{'ns_1@10.242.238.88',915},{0,0}}, {{'ns_1@10.242.238.88',916},{0,0}}, {{'ns_1@10.242.238.88',917},{0,0}}, {{'ns_1@10.242.238.88',918},{0,0}}, {{'ns_1@10.242.238.88',919},{0,0}}, {{'ns_1@10.242.238.88',920},{0,0}}, {{'ns_1@10.242.238.88',921},{0,0}}, {{'ns_1@10.242.238.88',922},{0,0}}, {{'ns_1@10.242.238.88',923},{0,0}}, {{'ns_1@10.242.238.88',924},{0,0}}, 
{{'ns_1@10.242.238.88',925},{0,0}}, {{'ns_1@10.242.238.88',926},{0,0}}, {{'ns_1@10.242.238.88',927},{0,0}}, {{'ns_1@10.242.238.88',928},{0,0}}, {{'ns_1@10.242.238.88',929},{0,0}}, {{'ns_1@10.242.238.88',930},{0,0}}, {{'ns_1@10.242.238.88',931},{0,0}}, {{'ns_1@10.242.238.88',932},{0,0}}, {{'ns_1@10.242.238.88',933},{0,0}}, {{'ns_1@10.242.238.88',934},{0,0}}, {{'ns_1@10.242.238.88',935},{0,0}}, {{'ns_1@10.242.238.88',936},{0,0}}, {{'ns_1@10.242.238.88',937},{0,0}}, {{'ns_1@10.242.238.88',938},{0,0}}, {{'ns_1@10.242.238.88',939},{0,0}}, {{'ns_1@10.242.238.88',940},{0,0}}, {{'ns_1@10.242.238.88',941},{0,0}}, {{'ns_1@10.242.238.88',942},{0,0}}, {{'ns_1@10.242.238.88',943},{0,0}}, {{'ns_1@10.242.238.88',944},{0,0}}, {{'ns_1@10.242.238.88',945},{0,0}}, {{'ns_1@10.242.238.88',946},{0,0}}, {{'ns_1@10.242.238.88',947},{0,0}}, {{'ns_1@10.242.238.88',948},{0,0}}, {{'ns_1@10.242.238.88',949},{0,0}}, {{'ns_1@10.242.238.88',950},{0,0}}, {{'ns_1@10.242.238.88',951},{0,0}}, {{'ns_1@10.242.238.88',952},{0,0}}, {{'ns_1@10.242.238.88',953},{0,0}}, {{'ns_1@10.242.238.88',954},{0,0}}, {{'ns_1@10.242.238.88',955},{0,0}}, {{'ns_1@10.242.238.88',956},{0,0}}, {{'ns_1@10.242.238.88',957},{0,0}}, {{'ns_1@10.242.238.88',958},{0,0}}, {{'ns_1@10.242.238.88',959},{0,0}}, {{'ns_1@10.242.238.88',960},{0,0}}, {{'ns_1@10.242.238.88',961},{0,0}}, {{'ns_1@10.242.238.88',962},{0,0}}, {{'ns_1@10.242.238.88',963},{0,0}}, {{'ns_1@10.242.238.88',964},{0,0}}, {{'ns_1@10.242.238.88',965},{0,0}}, {{'ns_1@10.242.238.88',966},{0,0}}, {{'ns_1@10.242.238.88',967},{0,0}}, {{'ns_1@10.242.238.88',968},{0,0}}, {{'ns_1@10.242.238.88',969},{0,0}}, {{'ns_1@10.242.238.88',970},{0,0}}, {{'ns_1@10.242.238.88',971},{0,0}}, {{'ns_1@10.242.238.88',972},{0,0}}, {{'ns_1@10.242.238.88',973},{0,0}}, {{'ns_1@10.242.238.88',974},{0,0}}, {{'ns_1@10.242.238.88',975},{0,0}}, {{'ns_1@10.242.238.88',976},{0,0}}, {{'ns_1@10.242.238.88',977},{0,0}}, {{'ns_1@10.242.238.88',978},{0,0}}, {{'ns_1@10.242.238.88',979},{0,0}}, {{'ns_1@10.242.238.88',980},{0,0}}, {{'ns_1@10.242.238.88',981},{0,0}}, {{'ns_1@10.242.238.88',982},{0,0}}, {{'ns_1@10.242.238.88',983},{0,0}}, {{'ns_1@10.242.238.88',984},{0,0}}, {{'ns_1@10.242.238.88',985},{0,0}}, {{'ns_1@10.242.238.88',986},{0,0}}, {{'ns_1@10.242.238.88',987},{0,0}}, {{'ns_1@10.242.238.88',988},{0,0}}, {{'ns_1@10.242.238.88',989},{0,0}}, {{'ns_1@10.242.238.88',990},{0,0}}, {{'ns_1@10.242.238.88',991},{0,0}}, {{'ns_1@10.242.238.88',992},{0,0}}, {{'ns_1@10.242.238.88',993},{0,0}}, {{'ns_1@10.242.238.88',994},{0,0}}, {{'ns_1@10.242.238.88',995},{0,0}}, {{'ns_1@10.242.238.88',996},{0,0}}, {{'ns_1@10.242.238.88',997},{0,0}}, {{'ns_1@10.242.238.88',998},{0,0}}, {{'ns_1@10.242.238.88',999},{0,0}}, {{'ns_1@10.242.238.88',1000},{0,0}}, {{'ns_1@10.242.238.88',1001},{0,0}}, {{'ns_1@10.242.238.88',1002},{0,0}}, {{'ns_1@10.242.238.88',1003},{0,0}}, {{'ns_1@10.242.238.88',1004},{0,0}}, {{'ns_1@10.242.238.88',1005},{0,0}}, {{'ns_1@10.242.238.88',1006},{0,0}}, {{'ns_1@10.242.238.88',1007},{0,0}}, {{'ns_1@10.242.238.88',1008},{0,0}}, {{'ns_1@10.242.238.88',1009},{0,0}}, {{'ns_1@10.242.238.88',1010},{0,0}}, {{'ns_1@10.242.238.88',1011},{0,0}}, {{'ns_1@10.242.238.88',1012},{0,0}}, {{'ns_1@10.242.238.88',1013},{0,0}}, {{'ns_1@10.242.238.88',1014},{0,0}}, {{'ns_1@10.242.238.88',1015},{0,0}}, {{'ns_1@10.242.238.88',1016},{0,0}}, {{'ns_1@10.242.238.88',1017},{0,0}}, {{'ns_1@10.242.238.88',1018},{0,0}}, {{'ns_1@10.242.238.88',1019},{0,0}}, {{'ns_1@10.242.238.88',1020},{0,0}}, {{'ns_1@10.242.238.88',1021},{0,0}}, {{'ns_1@10.242.238.88',1022},{0,0}}, 
{{'ns_1@10.242.238.88',1023},{0,0}}, {{'ns_1@10.242.238.89',0},{0,0}}, {{'ns_1@10.242.238.89',1},{0,0}}, {{'ns_1@10.242.238.89',2},{0,0}}, {{'ns_1@10.242.238.89',3},{0,0}}, {{'ns_1@10.242.238.89',4},{0,0}}, {{'ns_1@10.242.238.89',5},{0,0}}, {{'ns_1@10.242.238.89',6},{0,0}}, {{'ns_1@10.242.238.89',7},{0,0}}, {{'ns_1@10.242.238.89',8},{0,0}}, {{'ns_1@10.242.238.89',9},{0,0}}, {{'ns_1@10.242.238.89',10},{0,0}}, {{'ns_1@10.242.238.89',11},{0,0}}, {{'ns_1@10.242.238.89',12},{0,0}}, {{'ns_1@10.242.238.89',13},{0,0}}, {{'ns_1@10.242.238.89',14},{0,0}}, {{'ns_1@10.242.238.89',15},{0,0}}, {{'ns_1@10.242.238.89',16},{0,0}}, {{'ns_1@10.242.238.89',17},{0,0}}, {{'ns_1@10.242.238.89',18},{0,0}}, {{'ns_1@10.242.238.89',19},{0,0}}, {{'ns_1@10.242.238.89',20},{0,0}}, {{'ns_1@10.242.238.89',21},{0,0}}, {{'ns_1@10.242.238.89',22},{0,0}}, {{'ns_1@10.242.238.89',23},{0,0}}, {{'ns_1@10.242.238.89',24},{0,0}}, {{'ns_1@10.242.238.89',25},{0,0}}, {{'ns_1@10.242.238.89',26},{0,0}}, {{'ns_1@10.242.238.89',27},{0,0}}, {{'ns_1@10.242.238.89',28},{0,0}}, {{'ns_1@10.242.238.89',29},{0,0}}, {{'ns_1@10.242.238.89',30},{0,0}}, {{'ns_1@10.242.238.89',31},{0,0}}, {{'ns_1@10.242.238.89',32},{0,0}}, {{'ns_1@10.242.238.89',33},{0,0}}, {{'ns_1@10.242.238.89',34},{0,0}}, {{'ns_1@10.242.238.89',35},{0,0}}, {{'ns_1@10.242.238.89',36},{0,0}}, {{'ns_1@10.242.238.89',37},{0,0}}, {{'ns_1@10.242.238.89',38},{0,0}}, {{'ns_1@10.242.238.89',39},{0,0}}, {{'ns_1@10.242.238.89',40},{0,0}}, {{'ns_1@10.242.238.89',41},{0,0}}, {{'ns_1@10.242.238.89',42},{0,0}}, {{'ns_1@10.242.238.89',43},{0,0}}, {{'ns_1@10.242.238.89',44},{0,0}}, {{'ns_1@10.242.238.89',45},{0,0}}, {{'ns_1@10.242.238.89',46},{0,0}}, {{'ns_1@10.242.238.89',47},{0,0}}, {{'ns_1@10.242.238.89',48},{0,0}}, {{'ns_1@10.242.238.89',49},{0,0}}, {{'ns_1@10.242.238.89',50},{0,0}}, {{'ns_1@10.242.238.89',51},{0,0}}, {{'ns_1@10.242.238.89',52},{0,0}}, {{'ns_1@10.242.238.89',53},{0,0}}, {{'ns_1@10.242.238.89',54},{0,0}}, {{'ns_1@10.242.238.89',55},{0,0}}, {{'ns_1@10.242.238.89',56},{0,0}}, {{'ns_1@10.242.238.89',57},{0,0}}, {{'ns_1@10.242.238.89',58},{0,0}}, {{'ns_1@10.242.238.89',59},{0,0}}, {{'ns_1@10.242.238.89',60},{0,0}}, {{'ns_1@10.242.238.89',61},{0,0}}, {{'ns_1@10.242.238.89',62},{0,0}}, {{'ns_1@10.242.238.89',63},{0,0}}, {{'ns_1@10.242.238.89',64},{0,0}}, {{'ns_1@10.242.238.89',65},{0,0}}, {{'ns_1@10.242.238.89',66},{0,0}}, {{'ns_1@10.242.238.89',67},{0,0}}, {{'ns_1@10.242.238.89',68},{0,0}}, {{'ns_1@10.242.238.89',69},{0,0}}, {{'ns_1@10.242.238.89',70},{0,0}}, {{'ns_1@10.242.238.89',71},{0,0}}, {{'ns_1@10.242.238.89',72},{0,0}}, {{'ns_1@10.242.238.89',73},{0,0}}, {{'ns_1@10.242.238.89',74},{0,0}}, {{'ns_1@10.242.238.89',75},{0,0}}, {{'ns_1@10.242.238.89',76},{0,0}}, {{'ns_1@10.242.238.89',77},{0,0}}, {{'ns_1@10.242.238.89',78},{0,0}}, {{'ns_1@10.242.238.89',79},{0,0}}, {{'ns_1@10.242.238.89',80},{0,0}}, {{'ns_1@10.242.238.89',81},{0,0}}, {{'ns_1@10.242.238.89',82},{0,0}}, {{'ns_1@10.242.238.89',83},{0,0}}, {{'ns_1@10.242.238.89',84},{0,0}}, {{'ns_1@10.242.238.89',85},{0,0}}, {{'ns_1@10.242.238.89',256},{0,0}}, {{'ns_1@10.242.238.89',257},{0,0}}, {{'ns_1@10.242.238.89',258},{0,0}}, {{'ns_1@10.242.238.89',259},{0,0}}, {{'ns_1@10.242.238.89',260},{0,0}}, {{'ns_1@10.242.238.89',261},{0,0}}, {{'ns_1@10.242.238.89',262},{0,0}}, {{'ns_1@10.242.238.89',263},{0,0}}, {{'ns_1@10.242.238.89',264},{0,0}}, {{'ns_1@10.242.238.89',265},{0,0}}, {{'ns_1@10.242.238.89',266},{0,0}}, {{'ns_1@10.242.238.89',267},{0,0}}, {{'ns_1@10.242.238.89',268},{0,0}}, {{'ns_1@10.242.238.89',269},{0,0}}, 
{{'ns_1@10.242.238.89',270},{0,0}}, {{'ns_1@10.242.238.89',271},{0,0}}, {{'ns_1@10.242.238.89',272},{0,0}}, {{'ns_1@10.242.238.89',273},{0,0}}, {{'ns_1@10.242.238.89',274},{0,0}}, {{'ns_1@10.242.238.89',275},{0,0}}, {{'ns_1@10.242.238.89',276},{0,0}}, {{'ns_1@10.242.238.89',277},{0,0}}, {{'ns_1@10.242.238.89',278},{0,0}}, {{'ns_1@10.242.238.89',279},{0,0}}, {{'ns_1@10.242.238.89',280},{0,0}}, {{'ns_1@10.242.238.89',281},{0,0}}, {{'ns_1@10.242.238.89',282},{0,0}}, {{'ns_1@10.242.238.89',283},{0,0}}, {{'ns_1@10.242.238.89',284},{0,0}}, {{'ns_1@10.242.238.89',285},{0,0}}, {{'ns_1@10.242.238.89',286},{0,0}}, {{'ns_1@10.242.238.89',287},{0,0}}, {{'ns_1@10.242.238.89',288},{0,0}}, {{'ns_1@10.242.238.89',289},{0,0}}, {{'ns_1@10.242.238.89',290},{0,0}}, {{'ns_1@10.242.238.89',291},{0,0}}, {{'ns_1@10.242.238.89',292},{0,0}}, {{'ns_1@10.242.238.89',293},{0,0}}, {{'ns_1@10.242.238.89',294},{0,0}}, {{'ns_1@10.242.238.89',295},{0,0}}, {{'ns_1@10.242.238.89',296},{0,0}}, {{'ns_1@10.242.238.89',297},{0,0}}, {{'ns_1@10.242.238.89',298},{0,0}}, {{'ns_1@10.242.238.89',299},{0,0}}, {{'ns_1@10.242.238.89',300},{0,0}}, {{'ns_1@10.242.238.89',301},{0,0}}, {{'ns_1@10.242.238.89',302},{0,0}}, {{'ns_1@10.242.238.89',303},{0,0}}, {{'ns_1@10.242.238.89',304},{0,0}}, {{'ns_1@10.242.238.89',305},{0,0}}, {{'ns_1@10.242.238.89',306},{0,0}}, {{'ns_1@10.242.238.89',307},{0,0}}, {{'ns_1@10.242.238.89',308},{0,0}}, {{'ns_1@10.242.238.89',309},{0,0}}, {{'ns_1@10.242.238.89',310},{0,0}}, {{'ns_1@10.242.238.89',311},{0,0}}, {{'ns_1@10.242.238.89',312},{0,0}}, {{'ns_1@10.242.238.89',313},{0,0}}, {{'ns_1@10.242.238.89',314},{0,0}}, {{'ns_1@10.242.238.89',315},{0,0}}, {{'ns_1@10.242.238.89',316},{0,0}}, {{'ns_1@10.242.238.89',317},{0,0}}, {{'ns_1@10.242.238.89',318},{0,0}}, {{'ns_1@10.242.238.89',319},{0,0}}, {{'ns_1@10.242.238.89',320},{0,0}}, {{'ns_1@10.242.238.89',321},{0,0}}, {{'ns_1@10.242.238.89',322},{0,0}}, {{'ns_1@10.242.238.89',323},{0,0}}, {{'ns_1@10.242.238.89',324},{0,0}}, {{'ns_1@10.242.238.89',325},{0,0}}, {{'ns_1@10.242.238.89',326},{0,0}}, {{'ns_1@10.242.238.89',327},{0,0}}, {{'ns_1@10.242.238.89',328},{0,0}}, {{'ns_1@10.242.238.89',329},{0,0}}, {{'ns_1@10.242.238.89',330},{0,0}}, {{'ns_1@10.242.238.89',331},{0,0}}, {{'ns_1@10.242.238.89',332},{0,0}}, {{'ns_1@10.242.238.89',333},{0,0}}, {{'ns_1@10.242.238.89',334},{0,0}}, {{'ns_1@10.242.238.89',335},{0,0}}, {{'ns_1@10.242.238.89',336},{0,0}}, {{'ns_1@10.242.238.89',337},{0,0}}, {{'ns_1@10.242.238.89',338},{0,0}}, {{'ns_1@10.242.238.89',339},{0,0}}, {{'ns_1@10.242.238.89',340},{0,0}}, {{'ns_1@10.242.238.89',341},{0,0}}, {{'ns_1@10.242.238.89',342},{0,0}}, {{'ns_1@10.242.238.89',343},{0,0}}, {{'ns_1@10.242.238.89',344},{0,0}}, {{'ns_1@10.242.238.89',345},{0,0}}, {{'ns_1@10.242.238.89',346},{0,0}}, {{'ns_1@10.242.238.89',347},{0,0}}, {{'ns_1@10.242.238.89',348},{0,0}}, {{'ns_1@10.242.238.89',349},{0,0}}, {{'ns_1@10.242.238.89',350},{0,0}}, {{'ns_1@10.242.238.89',351},{0,0}}, {{'ns_1@10.242.238.89',352},{0,0}}, {{'ns_1@10.242.238.89',353},{0,0}}, {{'ns_1@10.242.238.89',354},{0,0}}, {{'ns_1@10.242.238.89',355},{0,0}}, {{'ns_1@10.242.238.89',356},{0,0}}, {{'ns_1@10.242.238.89',357},{0,0}}, {{'ns_1@10.242.238.89',358},{0,0}}, {{'ns_1@10.242.238.89',359},{0,0}}, {{'ns_1@10.242.238.89',360},{0,0}}, {{'ns_1@10.242.238.89',361},{0,0}}, {{'ns_1@10.242.238.89',362},{0,0}}, {{'ns_1@10.242.238.89',363},{0,0}}, {{'ns_1@10.242.238.89',364},{0,0}}, {{'ns_1@10.242.238.89',365},{0,0}}, {{'ns_1@10.242.238.89',366},{0,0}}, {{'ns_1@10.242.238.89',367},{0,0}}, 
{{'ns_1@10.242.238.89',368},{0,0}}, {{'ns_1@10.242.238.89',369},{0,0}}, {{'ns_1@10.242.238.89',370},{0,0}}, {{'ns_1@10.242.238.89',371},{0,0}}, {{'ns_1@10.242.238.89',372},{0,0}}, {{'ns_1@10.242.238.89',373},{0,0}}, {{'ns_1@10.242.238.89',374},{0,0}}, {{'ns_1@10.242.238.89',375},{0,0}}, {{'ns_1@10.242.238.89',376},{0,0}}, {{'ns_1@10.242.238.89',377},{0,0}}, {{'ns_1@10.242.238.89',378},{0,0}}, {{'ns_1@10.242.238.89',379},{0,0}}, {{'ns_1@10.242.238.89',380},{0,0}}, {{'ns_1@10.242.238.89',381},{0,0}}, {{'ns_1@10.242.238.89',382},{0,0}}, {{'ns_1@10.242.238.89',383},{0,0}}, {{'ns_1@10.242.238.89',384},{0,0}}, {{'ns_1@10.242.238.89',385},{0,0}}, {{'ns_1@10.242.238.89',386},{0,0}}, {{'ns_1@10.242.238.89',387},{0,0}}, {{'ns_1@10.242.238.89',388},{0,0}}, {{'ns_1@10.242.238.89',389},{0,0}}, {{'ns_1@10.242.238.89',390},{0,0}}, {{'ns_1@10.242.238.89',391},{0,0}}, {{'ns_1@10.242.238.89',392},{0,0}}, {{'ns_1@10.242.238.89',393},{0,0}}, {{'ns_1@10.242.238.89',394},{0,0}}, {{'ns_1@10.242.238.89',395},{0,0}}, {{'ns_1@10.242.238.89',396},{0,0}}, {{'ns_1@10.242.238.89',397},{0,0}}, {{'ns_1@10.242.238.89',398},{0,0}}, {{'ns_1@10.242.238.89',399},{0,0}}, {{'ns_1@10.242.238.89',400},{0,0}}, {{'ns_1@10.242.238.89',401},{0,0}}, {{'ns_1@10.242.238.89',402},{0,0}}, {{'ns_1@10.242.238.89',403},{0,0}}, {{'ns_1@10.242.238.89',404},{0,0}}, {{'ns_1@10.242.238.89',405},{0,0}}, {{'ns_1@10.242.238.89',406},{0,0}}, {{'ns_1@10.242.238.89',407},{0,0}}, {{'ns_1@10.242.238.89',408},{0,0}}, {{'ns_1@10.242.238.89',409},{0,0}}, {{'ns_1@10.242.238.89',410},{0,0}}, {{'ns_1@10.242.238.89',411},{0,0}}, {{'ns_1@10.242.238.89',412},{0,0}}, {{'ns_1@10.242.238.89',413},{0,0}}, {{'ns_1@10.242.238.89',414},{0,0}}, {{'ns_1@10.242.238.89',415},{0,0}}, {{'ns_1@10.242.238.89',416},{0,0}}, {{'ns_1@10.242.238.89',417},{0,0}}, {{'ns_1@10.242.238.89',418},{0,0}}, {{'ns_1@10.242.238.89',419},{0,0}}, {{'ns_1@10.242.238.89',420},{0,0}}, {{'ns_1@10.242.238.89',421},{0,0}}, {{'ns_1@10.242.238.89',422},{0,0}}, {{'ns_1@10.242.238.89',423},{0,0}}, {{'ns_1@10.242.238.89',424},{0,0}}, {{'ns_1@10.242.238.89',425},{0,0}}, {{'ns_1@10.242.238.89',426},{0,0}}, {{'ns_1@10.242.238.89',427},{0,0}}, {{'ns_1@10.242.238.89',428},{0,0}}, {{'ns_1@10.242.238.89',429},{0,0}}, {{'ns_1@10.242.238.89',430},{0,0}}, {{'ns_1@10.242.238.89',431},{0,0}}, {{'ns_1@10.242.238.89',432},{0,0}}, {{'ns_1@10.242.238.89',433},{0,0}}, {{'ns_1@10.242.238.89',434},{0,0}}, {{'ns_1@10.242.238.89',435},{0,0}}, {{'ns_1@10.242.238.89',436},{0,0}}, {{'ns_1@10.242.238.89',437},{0,0}}, {{'ns_1@10.242.238.89',438},{0,0}}, {{'ns_1@10.242.238.89',439},{0,0}}, {{'ns_1@10.242.238.89',440},{0,0}}, {{'ns_1@10.242.238.89',441},{0,0}}, {{'ns_1@10.242.238.89',442},{0,0}}, {{'ns_1@10.242.238.89',443},{0,0}}, {{'ns_1@10.242.238.89',444},{0,0}}, {{'ns_1@10.242.238.89',445},{0,0}}, {{'ns_1@10.242.238.89',446},{0,0}}, {{'ns_1@10.242.238.89',447},{0,0}}, {{'ns_1@10.242.238.89',448},{0,0}}, {{'ns_1@10.242.238.89',449},{0,0}}, {{'ns_1@10.242.238.89',450},{0,0}}, {{'ns_1@10.242.238.89',451},{0,0}}, {{'ns_1@10.242.238.89',452},{0,0}}, {{'ns_1@10.242.238.89',453},{0,0}}, {{'ns_1@10.242.238.89',454},{0,0}}, {{'ns_1@10.242.238.89',455},{0,0}}, {{'ns_1@10.242.238.89',456},{0,0}}, {{'ns_1@10.242.238.89',457},{0,0}}, {{'ns_1@10.242.238.89',458},{0,0}}, {{'ns_1@10.242.238.89',459},{0,0}}, {{'ns_1@10.242.238.89',460},{0,0}}, {{'ns_1@10.242.238.89',461},{0,0}}, {{'ns_1@10.242.238.89',462},{0,0}}, {{'ns_1@10.242.238.89',463},{0,0}}, {{'ns_1@10.242.238.89',464},{0,0}}, {{'ns_1@10.242.238.89',465},{0,0}}, 
{{'ns_1@10.242.238.89',466},{0,0}}, {{'ns_1@10.242.238.89',467},{0,0}}, {{'ns_1@10.242.238.89',468},{0,0}}, {{'ns_1@10.242.238.89',469},{0,0}}, {{'ns_1@10.242.238.89',470},{0,0}}, {{'ns_1@10.242.238.89',471},{0,0}}, {{'ns_1@10.242.238.89',472},{0,0}}, {{'ns_1@10.242.238.89',473},{0,0}}, {{'ns_1@10.242.238.89',474},{0,0}}, {{'ns_1@10.242.238.89',475},{0,0}}, {{'ns_1@10.242.238.89',476},{0,0}}, {{'ns_1@10.242.238.89',477},{0,0}}, {{'ns_1@10.242.238.89',478},{0,0}}, {{'ns_1@10.242.238.89',479},{0,0}}, {{'ns_1@10.242.238.89',480},{0,0}}, {{'ns_1@10.242.238.89',481},{0,0}}, {{'ns_1@10.242.238.89',482},{0,0}}, {{'ns_1@10.242.238.89',483},{0,0}}, {{'ns_1@10.242.238.89',484},{0,0}}, {{'ns_1@10.242.238.89',485},{0,0}}, {{'ns_1@10.242.238.89',486},{0,0}}, {{'ns_1@10.242.238.89',487},{0,0}}, {{'ns_1@10.242.238.89',488},{0,0}}, {{'ns_1@10.242.238.89',489},{0,0}}, {{'ns_1@10.242.238.89',490},{0,0}}, {{'ns_1@10.242.238.89',491},{0,0}}, {{'ns_1@10.242.238.89',492},{0,0}}, {{'ns_1@10.242.238.89',493},{0,0}}, {{'ns_1@10.242.238.89',494},{0,0}}, {{'ns_1@10.242.238.89',495},{0,0}}, {{'ns_1@10.242.238.89',496},{0,0}}, {{'ns_1@10.242.238.89',497},{0,0}}, {{'ns_1@10.242.238.89',498},{0,0}}, {{'ns_1@10.242.238.89',499},{0,0}}, {{'ns_1@10.242.238.89',500},{0,0}}, {{'ns_1@10.242.238.89',501},{0,0}}, {{'ns_1@10.242.238.89',502},{0,0}}, {{'ns_1@10.242.238.89',503},{0,0}}, {{'ns_1@10.242.238.89',504},{0,0}}, {{'ns_1@10.242.238.89',505},{0,0}}, {{'ns_1@10.242.238.89',506},{0,0}}, {{'ns_1@10.242.238.89',507},{0,0}}, {{'ns_1@10.242.238.89',508},{0,0}}, {{'ns_1@10.242.238.89',509},{0,0}}, {{'ns_1@10.242.238.89',510},{0,0}}, {{'ns_1@10.242.238.89',511},{0,0}}, {{'ns_1@10.242.238.89',597},{0,0}}, {{'ns_1@10.242.238.89',598},{0,0}}, {{'ns_1@10.242.238.89',599},{0,0}}, {{'ns_1@10.242.238.89',600},{0,0}}, {{'ns_1@10.242.238.89',601},{0,0}}, {{'ns_1@10.242.238.89',602},{0,0}}, {{'ns_1@10.242.238.89',603},{0,0}}, {{'ns_1@10.242.238.89',604},{0,0}}, {{'ns_1@10.242.238.89',605},{0,0}}, {{'ns_1@10.242.238.89',606},{0,0}}, {{'ns_1@10.242.238.89',607},{0,0}}, {{'ns_1@10.242.238.89',608},{0,0}}, {{'ns_1@10.242.238.89',609},{0,0}}, {{'ns_1@10.242.238.89',610},{0,0}}, {{'ns_1@10.242.238.89',611},{0,0}}, {{'ns_1@10.242.238.89',612},{0,0}}, {{'ns_1@10.242.238.89',613},{0,0}}, {{'ns_1@10.242.238.89',614},{0,0}}, {{'ns_1@10.242.238.89',615},{0,0}}, {{'ns_1@10.242.238.89',616},{0,0}}, {{'ns_1@10.242.238.89',617},{0,0}}, {{'ns_1@10.242.238.89',618},{0,0}}, {{'ns_1@10.242.238.89',619},{0,0}}, {{'ns_1@10.242.238.89',620},{0,0}}, {{'ns_1@10.242.238.89',621},{0,0}}, {{'ns_1@10.242.238.89',622},{0,0}}, {{'ns_1@10.242.238.89',623},{0,0}}, {{'ns_1@10.242.238.89',624},{0,0}}, {{'ns_1@10.242.238.89',625},{0,0}}, {{'ns_1@10.242.238.89',626},{0,0}}, {{'ns_1@10.242.238.89',627},{0,0}}, {{'ns_1@10.242.238.89',628},{0,0}}, {{'ns_1@10.242.238.89',629},{0,0}}, {{'ns_1@10.242.238.89',630},{0,0}}, {{'ns_1@10.242.238.89',631},{0,0}}, {{'ns_1@10.242.238.89',632},{0,0}}, {{'ns_1@10.242.238.89',633},{0,0}}, {{'ns_1@10.242.238.89',634},{0,0}}, {{'ns_1@10.242.238.89',635},{0,0}}, {{'ns_1@10.242.238.89',636},{0,0}}, {{'ns_1@10.242.238.89',637},{0,0}}, {{'ns_1@10.242.238.89',638},{0,0}}, {{'ns_1@10.242.238.89',639},{0,0}}, {{'ns_1@10.242.238.89',640},{0,0}}, {{'ns_1@10.242.238.89',641},{0,0}}, {{'ns_1@10.242.238.89',642},{0,0}}, {{'ns_1@10.242.238.89',643},{0,0}}, {{'ns_1@10.242.238.89',644},{0,0}}, {{'ns_1@10.242.238.89',645},{0,0}}, {{'ns_1@10.242.238.89',646},{0,0}}, {{'ns_1@10.242.238.89',647},{0,0}}, {{'ns_1@10.242.238.89',648},{0,0}}, 
{{'ns_1@10.242.238.89',649},{0,0}}, {{'ns_1@10.242.238.89',650},{0,0}}, {{'ns_1@10.242.238.89',651},{0,0}}, {{'ns_1@10.242.238.89',652},{0,0}}, {{'ns_1@10.242.238.89',653},{0,0}}, {{'ns_1@10.242.238.89',654},{0,0}}, {{'ns_1@10.242.238.89',655},{0,0}}, {{'ns_1@10.242.238.89',656},{0,0}}, {{'ns_1@10.242.238.89',657},{0,0}}, {{'ns_1@10.242.238.89',658},{0,0}}, {{'ns_1@10.242.238.89',659},{0,0}}, {{'ns_1@10.242.238.89',660},{0,0}}, {{'ns_1@10.242.238.89',661},{0,0}}, {{'ns_1@10.242.238.89',662},{0,0}}, {{'ns_1@10.242.238.89',663},{0,0}}, {{'ns_1@10.242.238.89',664},{0,0}}, {{'ns_1@10.242.238.89',665},{0,0}}, {{'ns_1@10.242.238.89',666},{0,0}}, {{'ns_1@10.242.238.89',667},{0,0}}, {{'ns_1@10.242.238.89',668},{0,0}}, {{'ns_1@10.242.238.89',669},{0,0}}, {{'ns_1@10.242.238.89',670},{0,0}}, {{'ns_1@10.242.238.89',671},{0,0}}, {{'ns_1@10.242.238.89',672},{0,0}}, {{'ns_1@10.242.238.89',673},{0,0}}, {{'ns_1@10.242.238.89',674},{0,0}}, {{'ns_1@10.242.238.89',675},{0,0}}, {{'ns_1@10.242.238.89',676},{0,0}}, {{'ns_1@10.242.238.89',677},{0,0}}, {{'ns_1@10.242.238.89',678},{0,0}}, {{'ns_1@10.242.238.89',679},{0,0}}, {{'ns_1@10.242.238.89',680},{0,0}}, {{'ns_1@10.242.238.89',681},{0,0}}, {{'ns_1@10.242.238.89',853},{0,0}}, {{'ns_1@10.242.238.89',854},{0,0}}, {{'ns_1@10.242.238.89',855},{0,0}}, {{'ns_1@10.242.238.89',856},{0,0}}, {{'ns_1@10.242.238.89',857},{0,0}}, {{'ns_1@10.242.238.89',858},{0,0}}, {{'ns_1@10.242.238.89',859},{0,0}}, {{'ns_1@10.242.238.89',860},{0,0}}, {{'ns_1@10.242.238.89',861},{0,0}}, {{'ns_1@10.242.238.89',862},{0,0}}, {{'ns_1@10.242.238.89',863},{0,0}}, {{'ns_1@10.242.238.89',864},{0,0}}, {{'ns_1@10.242.238.89',865},{0,0}}, {{'ns_1@10.242.238.89',866},{0,0}}, {{'ns_1@10.242.238.89',867},{0,0}}, {{'ns_1@10.242.238.89',868},{0,0}}, {{'ns_1@10.242.238.89',869},{0,0}}, {{'ns_1@10.242.238.89',870},{0,0}}, {{'ns_1@10.242.238.89',871},{0,0}}, {{'ns_1@10.242.238.89',872},{0,0}}, {{'ns_1@10.242.238.89',873},{0,0}}, {{'ns_1@10.242.238.89',874},{0,0}}, {{'ns_1@10.242.238.89',875},{0,0}}, {{'ns_1@10.242.238.89',876},{0,0}}, {{'ns_1@10.242.238.89',877},{0,0}}, {{'ns_1@10.242.238.89',878},{0,0}}, {{'ns_1@10.242.238.89',879},{0,0}}, {{'ns_1@10.242.238.89',880},{0,0}}, {{'ns_1@10.242.238.89',881},{0,0}}, {{'ns_1@10.242.238.89',882},{0,0}}, {{'ns_1@10.242.238.89',883},{0,0}}, {{'ns_1@10.242.238.89',884},{0,0}}, {{'ns_1@10.242.238.89',885},{0,0}}, {{'ns_1@10.242.238.89',886},{0,0}}, {{'ns_1@10.242.238.89',887},{0,0}}, {{'ns_1@10.242.238.89',888},{0,0}}, {{'ns_1@10.242.238.89',889},{0,0}}, {{'ns_1@10.242.238.89',890},{0,0}}, {{'ns_1@10.242.238.89',891},{0,0}}, {{'ns_1@10.242.238.89',892},{0,0}}, {{'ns_1@10.242.238.89',893},{0,0}}, {{'ns_1@10.242.238.89',894},{0,0}}, {{'ns_1@10.242.238.89',895},{0,0}}, {{'ns_1@10.242.238.89',896},{0,0}}, {{'ns_1@10.242.238.89',897},{0,0}}, {{'ns_1@10.242.238.89',898},{0,0}}, {{'ns_1@10.242.238.89',899},{0,0}}, {{'ns_1@10.242.238.89',900},{0,0}}, {{'ns_1@10.242.238.89',901},{0,0}}, {{'ns_1@10.242.238.89',902},{0,0}}, {{'ns_1@10.242.238.89',903},{0,0}}, {{'ns_1@10.242.238.89',904},{0,0}}, {{'ns_1@10.242.238.89',905},{0,0}}, {{'ns_1@10.242.238.89',906},{0,0}}, {{'ns_1@10.242.238.89',907},{0,0}}, {{'ns_1@10.242.238.89',908},{0,0}}, {{'ns_1@10.242.238.89',909},{0,0}}, {{'ns_1@10.242.238.89',910},{0,0}}, {{'ns_1@10.242.238.89',911},{0,0}}, {{'ns_1@10.242.238.89',912},{0,0}}, {{'ns_1@10.242.238.89',913},{0,0}}, {{'ns_1@10.242.238.89',914},{0,0}}, {{'ns_1@10.242.238.89',915},{0,0}}, {{'ns_1@10.242.238.89',916},{0,0}}, {{'ns_1@10.242.238.89',917},{0,0}}, 
{{'ns_1@10.242.238.89',918},{0,0}}, {{'ns_1@10.242.238.89',919},{0,0}}, {{'ns_1@10.242.238.89',920},{0,0}}, {{'ns_1@10.242.238.89',921},{0,0}}, {{'ns_1@10.242.238.89',922},{0,0}}, {{'ns_1@10.242.238.89',923},{0,0}}, {{'ns_1@10.242.238.89',924},{0,0}}, {{'ns_1@10.242.238.89',925},{0,0}}, {{'ns_1@10.242.238.89',926},{0,0}}, {{'ns_1@10.242.238.89',927},{0,0}}, {{'ns_1@10.242.238.89',928},{0,0}}, {{'ns_1@10.242.238.89',929},{0,0}}, {{'ns_1@10.242.238.89',930},{0,0}}, {{'ns_1@10.242.238.89',931},{0,0}}, {{'ns_1@10.242.238.89',932},{0,0}}, {{'ns_1@10.242.238.89',933},{0,0}}, {{'ns_1@10.242.238.89',934},{0,0}}, {{'ns_1@10.242.238.89',935},{0,0}}, {{'ns_1@10.242.238.89',936},{0,0}}, {{'ns_1@10.242.238.89',937},{0,0}}, {{'ns_1@10.242.238.90',86},{0,0}}, {{'ns_1@10.242.238.90',87},{0,0}}, {{'ns_1@10.242.238.90',88},{0,0}}, {{'ns_1@10.242.238.90',89},{0,0}}, {{'ns_1@10.242.238.90',90},{0,0}}, {{'ns_1@10.242.238.90',91},{0,0}}, {{'ns_1@10.242.238.90',92},{0,0}}, {{'ns_1@10.242.238.90',93},{0,0}}, {{'ns_1@10.242.238.90',94},{0,0}}, {{'ns_1@10.242.238.90',95},{0,0}}, {{'ns_1@10.242.238.90',96},{0,0}}, {{'ns_1@10.242.238.90',97},{0,0}}, {{'ns_1@10.242.238.90',98},{0,0}}, {{'ns_1@10.242.238.90',99},{0,0}}, {{'ns_1@10.242.238.90',100},{0,0}}, {{'ns_1@10.242.238.90',101},{0,0}}, {{'ns_1@10.242.238.90',102},{0,0}}, {{'ns_1@10.242.238.90',103},{0,0}}, {{'ns_1@10.242.238.90',104},{0,0}}, {{'ns_1@10.242.238.90',105},{0,0}}, {{'ns_1@10.242.238.90',106},{0,0}}, {{'ns_1@10.242.238.90',107},{0,0}}, {{'ns_1@10.242.238.90',108},{0,0}}, {{'ns_1@10.242.238.90',109},{0,0}}, {{'ns_1@10.242.238.90',110},{0,0}}, {{'ns_1@10.242.238.90',111},{0,0}}, {{'ns_1@10.242.238.90',112},{0,0}}, {{'ns_1@10.242.238.90',113},{0,0}}, {{'ns_1@10.242.238.90',114},{0,0}}, {{'ns_1@10.242.238.90',115},{0,0}}, {{'ns_1@10.242.238.90',116},{0,0}}, {{'ns_1@10.242.238.90',117},{0,0}}, {{'ns_1@10.242.238.90',118},{0,0}}, {{'ns_1@10.242.238.90',119},{0,0}}, {{'ns_1@10.242.238.90',120},{0,0}}, {{'ns_1@10.242.238.90',121},{0,0}}, {{'ns_1@10.242.238.90',122},{0,0}}, {{'ns_1@10.242.238.90',123},{0,0}}, {{'ns_1@10.242.238.90',124},{0,0}}, {{'ns_1@10.242.238.90',125},{0,0}}, {{'ns_1@10.242.238.90',126},{0,0}}, {{'ns_1@10.242.238.90',127},{0,0}}, {{'ns_1@10.242.238.90',128},{0,0}}, {{'ns_1@10.242.238.90',129},{0,0}}, {{'ns_1@10.242.238.90',130},{0,0}}, {{'ns_1@10.242.238.90',131},{0,0}}, {{'ns_1@10.242.238.90',132},{0,0}}, {{'ns_1@10.242.238.90',133},{0,0}}, {{'ns_1@10.242.238.90',134},{0,0}}, {{'ns_1@10.242.238.90',135},{0,0}}, {{'ns_1@10.242.238.90',136},{0,0}}, {{'ns_1@10.242.238.90',137},{0,0}}, {{'ns_1@10.242.238.90',138},{0,0}}, {{'ns_1@10.242.238.90',139},{0,0}}, {{'ns_1@10.242.238.90',140},{0,0}}, {{'ns_1@10.242.238.90',141},{0,0}}, {{'ns_1@10.242.238.90',142},{0,0}}, {{'ns_1@10.242.238.90',143},{0,0}}, {{'ns_1@10.242.238.90',144},{0,0}}, {{'ns_1@10.242.238.90',145},{0,0}}, {{'ns_1@10.242.238.90',146},{0,0}}, {{'ns_1@10.242.238.90',147},{0,0}}, {{'ns_1@10.242.238.90',148},{0,0}}, {{'ns_1@10.242.238.90',149},{0,0}}, {{'ns_1@10.242.238.90',150},{0,0}}, {{'ns_1@10.242.238.90',151},{0,0}}, {{'ns_1@10.242.238.90',152},{0,0}}, {{'ns_1@10.242.238.90',153},{0,0}}, {{'ns_1@10.242.238.90',154},{0,0}}, {{'ns_1@10.242.238.90',155},{0,0}}, {{'ns_1@10.242.238.90',156},{0,0}}, {{'ns_1@10.242.238.90',157},{0,0}}, {{'ns_1@10.242.238.90',158},{0,0}}, {{'ns_1@10.242.238.90',159},{0,0}}, {{'ns_1@10.242.238.90',160},{0,0}}, {{'ns_1@10.242.238.90',161},{0,0}}, {{'ns_1@10.242.238.90',162},{0,0}}, {{'ns_1@10.242.238.90',163},{0,0}}, {{'ns_1@10.242.238.90',164},{0,0}}, 
{{'ns_1@10.242.238.90',165},{0,0}}, {{'ns_1@10.242.238.90',166},{0,0}}, {{'ns_1@10.242.238.90',167},{0,0}}, {{'ns_1@10.242.238.90',168},{0,0}}, {{'ns_1@10.242.238.90',169},{0,0}}, {{'ns_1@10.242.238.90',170},{0,0}}, {{'ns_1@10.242.238.90',342},{0,0}}, {{'ns_1@10.242.238.90',343},{0,0}}, {{'ns_1@10.242.238.90',344},{0,0}}, {{'ns_1@10.242.238.90',345},{0,0}}, {{'ns_1@10.242.238.90',346},{0,0}}, {{'ns_1@10.242.238.90',347},{0,0}}, {{'ns_1@10.242.238.90',348},{0,0}}, {{'ns_1@10.242.238.90',349},{0,0}}, {{'ns_1@10.242.238.90',350},{0,0}}, {{'ns_1@10.242.238.90',351},{0,0}}, {{'ns_1@10.242.238.90',352},{0,0}}, {{'ns_1@10.242.238.90',353},{0,0}}, {{'ns_1@10.242.238.90',354},{0,0}}, {{'ns_1@10.242.238.90',355},{0,0}}, {{'ns_1@10.242.238.90',356},{0,0}}, {{'ns_1@10.242.238.90',357},{0,0}}, {{'ns_1@10.242.238.90',358},{0,0}}, {{'ns_1@10.242.238.90',359},{0,0}}, {{'ns_1@10.242.238.90',360},{0,0}}, {{'ns_1@10.242.238.90',361},{0,0}}, {{'ns_1@10.242.238.90',362},{0,0}}, {{'ns_1@10.242.238.90',363},{0,0}}, {{'ns_1@10.242.238.90',364},{0,0}}, {{'ns_1@10.242.238.90',365},{0,0}}, {{'ns_1@10.242.238.90',366},{0,0}}, {{'ns_1@10.242.238.90',367},{0,0}}, {{'ns_1@10.242.238.90',368},{0,0}}, {{'ns_1@10.242.238.90',369},{0,0}}, {{'ns_1@10.242.238.90',370},{0,0}}, {{'ns_1@10.242.238.90',371},{0,0}}, {{'ns_1@10.242.238.90',372},{0,0}}, {{'ns_1@10.242.238.90',373},{0,0}}, {{'ns_1@10.242.238.90',374},{0,0}}, {{'ns_1@10.242.238.90',375},{0,0}}, {{'ns_1@10.242.238.90',376},{0,0}}, {{'ns_1@10.242.238.90',377},{0,0}}, {{'ns_1@10.242.238.90',378},{0,0}}, {{'ns_1@10.242.238.90',379},{0,0}}, {{'ns_1@10.242.238.90',380},{0,0}}, {{'ns_1@10.242.238.90',381},{0,0}}, {{'ns_1@10.242.238.90',382},{0,0}}, {{'ns_1@10.242.238.90',383},{0,0}}, {{'ns_1@10.242.238.90',384},{0,0}}, {{'ns_1@10.242.238.90',385},{0,0}}, {{'ns_1@10.242.238.90',386},{0,0}}, {{'ns_1@10.242.238.90',387},{0,0}}, {{'ns_1@10.242.238.90',388},{0,0}}, {{'ns_1@10.242.238.90',389},{0,0}}, {{'ns_1@10.242.238.90',390},{0,0}}, {{'ns_1@10.242.238.90',391},{0,0}}, {{'ns_1@10.242.238.90',392},{0,0}}, {{'ns_1@10.242.238.90',393},{0,0}}, {{'ns_1@10.242.238.90',394},{0,0}}, {{'ns_1@10.242.238.90',395},{0,0}}, {{'ns_1@10.242.238.90',396},{0,0}}, {{'ns_1@10.242.238.90',397},{0,0}}, {{'ns_1@10.242.238.90',398},{0,0}}, {{'ns_1@10.242.238.90',399},{0,0}}, {{'ns_1@10.242.238.90',400},{0,0}}, {{'ns_1@10.242.238.90',401},{0,0}}, {{'ns_1@10.242.238.90',402},{0,0}}, {{'ns_1@10.242.238.90',403},{0,0}}, {{'ns_1@10.242.238.90',404},{0,0}}, {{'ns_1@10.242.238.90',405},{0,0}}, {{'ns_1@10.242.238.90',406},{0,0}}, {{'ns_1@10.242.238.90',407},{0,0}}, {{'ns_1@10.242.238.90',408},{0,0}}, {{'ns_1@10.242.238.90',409},{0,0}}, {{'ns_1@10.242.238.90',410},{0,0}}, {{'ns_1@10.242.238.90',411},{0,0}}, {{'ns_1@10.242.238.90',412},{0,0}}, {{'ns_1@10.242.238.90',413},{0,0}}, {{'ns_1@10.242.238.90',414},{0,0}}, {{'ns_1@10.242.238.90',415},{0,0}}, {{'ns_1@10.242.238.90',416},{0,0}}, {{'ns_1@10.242.238.90',417},{0,0}}, {{'ns_1@10.242.238.90',418},{0,0}}, {{'ns_1@10.242.238.90',419},{0,0}}, {{'ns_1@10.242.238.90',420},{0,0}}, {{'ns_1@10.242.238.90',421},{0,0}}, {{'ns_1@10.242.238.90',422},{0,0}}, {{'ns_1@10.242.238.90',423},{0,0}}, {{'ns_1@10.242.238.90',424},{0,0}}, {{'ns_1@10.242.238.90',425},{0,0}}, {{'ns_1@10.242.238.90',426},{0,0}}, {{'ns_1@10.242.238.90',512},{0,0}}, {{'ns_1@10.242.238.90',513},{0,0}}, {{'ns_1@10.242.238.90',514},{0,0}}, {{'ns_1@10.242.238.90',515},{0,0}}, {{'ns_1@10.242.238.90',516},{0,0}}, {{'ns_1@10.242.238.90',517},{0,0}}, {{'ns_1@10.242.238.90',518},{0,0}}, 
{{'ns_1@10.242.238.90',519},{0,0}}, {{'ns_1@10.242.238.90',520},{0,0}}, {{'ns_1@10.242.238.90',521},{0,0}}, {{'ns_1@10.242.238.90',522},{0,0}}, {{'ns_1@10.242.238.90',523},{0,0}}, {{'ns_1@10.242.238.90',524},{0,0}}, {{'ns_1@10.242.238.90',525},{0,0}}, {{'ns_1@10.242.238.90',526},{0,0}}, {{'ns_1@10.242.238.90',527},{0,0}}, {{'ns_1@10.242.238.90',528},{0,0}}, {{'ns_1@10.242.238.90',529},{0,0}}, {{'ns_1@10.242.238.90',530},{0,0}}, {{'ns_1@10.242.238.90',531},{0,0}}, {{'ns_1@10.242.238.90',532},{0,0}}, {{'ns_1@10.242.238.90',533},{0,0}}, {{'ns_1@10.242.238.90',534},{0,0}}, {{'ns_1@10.242.238.90',535},{0,0}}, {{'ns_1@10.242.238.90',536},{0,0}}, {{'ns_1@10.242.238.90',537},{0,0}}, {{'ns_1@10.242.238.90',538},{0,0}}, {{'ns_1@10.242.238.90',539},{0,0}}, {{'ns_1@10.242.238.90',540},{0,0}}, {{'ns_1@10.242.238.90',541},{0,0}}, {{'ns_1@10.242.238.90',542},{0,0}}, {{'ns_1@10.242.238.90',543},{0,0}}, {{'ns_1@10.242.238.90',544},{0,0}}, {{'ns_1@10.242.238.90',545},{0,0}}, {{'ns_1@10.242.238.90',546},{0,0}}, {{'ns_1@10.242.238.90',547},{0,0}}, {{'ns_1@10.242.238.90',548},{0,0}}, {{'ns_1@10.242.238.90',549},{0,0}}, {{'ns_1@10.242.238.90',550},{0,0}}, {{'ns_1@10.242.238.90',551},{0,0}}, {{'ns_1@10.242.238.90',552},{0,0}}, {{'ns_1@10.242.238.90',553},{0,0}}, {{'ns_1@10.242.238.90',554},{0,0}}, {{'ns_1@10.242.238.90',555},{0,0}}, {{'ns_1@10.242.238.90',556},{0,0}}, {{'ns_1@10.242.238.90',557},{0,0}}, {{'ns_1@10.242.238.90',558},{0,0}}, {{'ns_1@10.242.238.90',559},{0,0}}, {{'ns_1@10.242.238.90',560},{0,0}}, {{'ns_1@10.242.238.90',561},{0,0}}, {{'ns_1@10.242.238.90',562},{0,0}}, {{'ns_1@10.242.238.90',563},{0,0}}, {{'ns_1@10.242.238.90',564},{0,0}}, {{'ns_1@10.242.238.90',565},{0,0}}, {{'ns_1@10.242.238.90',566},{0,0}}, {{'ns_1@10.242.238.90',567},{0,0}}, {{'ns_1@10.242.238.90',568},{0,0}}, {{'ns_1@10.242.238.90',569},{0,0}}, {{'ns_1@10.242.238.90',570},{0,0}}, {{'ns_1@10.242.238.90',571},{0,0}}, {{'ns_1@10.242.238.90',572},{0,0}}, {{'ns_1@10.242.238.90',573},{0,0}}, {{'ns_1@10.242.238.90',574},{0,0}}, {{'ns_1@10.242.238.90',575},{0,0}}, {{'ns_1@10.242.238.90',576},{0,0}}, {{'ns_1@10.242.238.90',577},{0,0}}, {{'ns_1@10.242.238.90',578},{0,0}}, {{'ns_1@10.242.238.90',579},{0,0}}, {{'ns_1@10.242.238.90',580},{0,0}}, {{'ns_1@10.242.238.90',581},{0,0}}, {{'ns_1@10.242.238.90',582},{0,0}}, {{'ns_1@10.242.238.90',583},{0,0}}, {{'ns_1@10.242.238.90',584},{0,0}}, {{'ns_1@10.242.238.90',585},{0,0}}, {{'ns_1@10.242.238.90',586},{0,0}}, {{'ns_1@10.242.238.90',587},{0,0}}, {{'ns_1@10.242.238.90',588},{0,0}}, {{'ns_1@10.242.238.90',589},{0,0}}, {{'ns_1@10.242.238.90',590},{0,0}}, {{'ns_1@10.242.238.90',591},{0,0}}, {{'ns_1@10.242.238.90',592},{0,0}}, {{'ns_1@10.242.238.90',593},{0,0}}, {{'ns_1@10.242.238.90',594},{0,0}}, {{'ns_1@10.242.238.90',595},{0,0}}, {{'ns_1@10.242.238.90',596},{0,0}}, {{'ns_1@10.242.238.90',597},{0,0}}, {{'ns_1@10.242.238.90',598},{0,0}}, {{'ns_1@10.242.238.90',599},{0,0}}, {{'ns_1@10.242.238.90',600},{0,0}}, {{'ns_1@10.242.238.90',601},{0,0}}, {{'ns_1@10.242.238.90',602},{0,0}}, {{'ns_1@10.242.238.90',603},{0,0}}, {{'ns_1@10.242.238.90',604},{0,0}}, {{'ns_1@10.242.238.90',605},{0,0}}, {{'ns_1@10.242.238.90',606},{0,0}}, {{'ns_1@10.242.238.90',607},{0,0}}, {{'ns_1@10.242.238.90',608},{0,0}}, {{'ns_1@10.242.238.90',609},{0,0}}, {{'ns_1@10.242.238.90',610},{0,0}}, {{'ns_1@10.242.238.90',611},{0,0}}, {{'ns_1@10.242.238.90',612},{0,0}}, {{'ns_1@10.242.238.90',613},{0,0}}, {{'ns_1@10.242.238.90',614},{0,0}}, {{'ns_1@10.242.238.90',615},{0,0}}, {{'ns_1@10.242.238.90',616},{0,0}}, 
{{'ns_1@10.242.238.90',617},{0,0}}, {{'ns_1@10.242.238.90',618},{0,0}}, {{'ns_1@10.242.238.90',619},{0,0}}, {{'ns_1@10.242.238.90',620},{0,0}}, {{'ns_1@10.242.238.90',621},{0,0}}, {{'ns_1@10.242.238.90',622},{0,0}}, {{'ns_1@10.242.238.90',623},{0,0}}, {{'ns_1@10.242.238.90',624},{0,0}}, {{'ns_1@10.242.238.90',625},{0,0}}, {{'ns_1@10.242.238.90',626},{0,0}}, {{'ns_1@10.242.238.90',627},{0,0}}, {{'ns_1@10.242.238.90',628},{0,0}}, {{'ns_1@10.242.238.90',629},{0,0}}, {{'ns_1@10.242.238.90',630},{0,0}}, {{'ns_1@10.242.238.90',631},{0,0}}, {{'ns_1@10.242.238.90',632},{0,0}}, {{'ns_1@10.242.238.90',633},{0,0}}, {{'ns_1@10.242.238.90',634},{0,0}}, {{'ns_1@10.242.238.90',635},{0,0}}, {{'ns_1@10.242.238.90',636},{0,0}}, {{'ns_1@10.242.238.90',637},{0,0}}, {{'ns_1@10.242.238.90',638},{0,0}}, {{'ns_1@10.242.238.90',639},{0,0}}, {{'ns_1@10.242.238.90',640},{0,0}}, {{'ns_1@10.242.238.90',641},{0,0}}, {{'ns_1@10.242.238.90',642},{0,0}}, {{'ns_1@10.242.238.90',643},{0,0}}, {{'ns_1@10.242.238.90',644},{0,0}}, {{'ns_1@10.242.238.90',645},{0,0}}, {{'ns_1@10.242.238.90',646},{0,0}}, {{'ns_1@10.242.238.90',647},{0,0}}, {{'ns_1@10.242.238.90',648},{0,0}}, {{'ns_1@10.242.238.90',649},{0,0}}, {{'ns_1@10.242.238.90',650},{0,0}}, {{'ns_1@10.242.238.90',651},{0,0}}, {{'ns_1@10.242.238.90',652},{0,0}}, {{'ns_1@10.242.238.90',653},{0,0}}, {{'ns_1@10.242.238.90',654},{0,0}}, {{'ns_1@10.242.238.90',655},{0,0}}, {{'ns_1@10.242.238.90',656},{0,0}}, {{'ns_1@10.242.238.90',657},{0,0}}, {{'ns_1@10.242.238.90',658},{0,0}}, {{'ns_1@10.242.238.90',659},{0,0}}, {{'ns_1@10.242.238.90',660},{0,0}}, {{'ns_1@10.242.238.90',661},{0,0}}, {{'ns_1@10.242.238.90',662},{0,0}}, {{'ns_1@10.242.238.90',663},{0,0}}, {{'ns_1@10.242.238.90',664},{0,0}}, {{'ns_1@10.242.238.90',665},{0,0}}, {{'ns_1@10.242.238.90',666},{0,0}}, {{'ns_1@10.242.238.90',667},{0,0}}, {{'ns_1@10.242.238.90',668},{0,0}}, {{'ns_1@10.242.238.90',669},{0,0}}, {{'ns_1@10.242.238.90',670},{0,0}}, {{'ns_1@10.242.238.90',671},{0,0}}, {{'ns_1@10.242.238.90',672},{0,0}}, {{'ns_1@10.242.238.90',673},{0,0}}, {{'ns_1@10.242.238.90',674},{0,0}}, {{'ns_1@10.242.238.90',675},{0,0}}, {{'ns_1@10.242.238.90',676},{0,0}}, {{'ns_1@10.242.238.90',677},{0,0}}, {{'ns_1@10.242.238.90',678},{0,0}}, {{'ns_1@10.242.238.90',679},{0,0}}, {{'ns_1@10.242.238.90',680},{0,0}}, {{'ns_1@10.242.238.90',681},{0,0}}, {{'ns_1@10.242.238.90',682},{0,0}}, {{'ns_1@10.242.238.90',683},{0,0}}, {{'ns_1@10.242.238.90',684},{0,0}}, {{'ns_1@10.242.238.90',685},{0,0}}, {{'ns_1@10.242.238.90',686},{0,0}}, {{'ns_1@10.242.238.90',687},{0,0}}, {{'ns_1@10.242.238.90',688},{0,0}}, {{'ns_1@10.242.238.90',689},{0,0}}, {{'ns_1@10.242.238.90',690},{0,0}}, {{'ns_1@10.242.238.90',691},{0,0}}, {{'ns_1@10.242.238.90',692},{0,0}}, {{'ns_1@10.242.238.90',693},{0,0}}, {{'ns_1@10.242.238.90',694},{0,0}}, {{'ns_1@10.242.238.90',695},{0,0}}, {{'ns_1@10.242.238.90',696},{0,0}}, {{'ns_1@10.242.238.90',697},{0,0}}, {{'ns_1@10.242.238.90',698},{0,0}}, {{'ns_1@10.242.238.90',699},{0,0}}, {{'ns_1@10.242.238.90',700},{0,0}}, {{'ns_1@10.242.238.90',701},{0,0}}, {{'ns_1@10.242.238.90',702},{0,0}}, {{'ns_1@10.242.238.90',703},{0,0}}, {{'ns_1@10.242.238.90',704},{0,0}}, {{'ns_1@10.242.238.90',705},{0,0}}, {{'ns_1@10.242.238.90',706},{0,0}}, {{'ns_1@10.242.238.90',707},{0,0}}, {{'ns_1@10.242.238.90',708},{0,0}}, {{'ns_1@10.242.238.90',709},{0,0}}, {{'ns_1@10.242.238.90',710},{0,0}}, {{'ns_1@10.242.238.90',711},{0,0}}, {{'ns_1@10.242.238.90',712},{0,0}}, {{'ns_1@10.242.238.90',713},{0,0}}, {{'ns_1@10.242.238.90',714},{0,0}}, 
{{'ns_1@10.242.238.90',715},{0,0}}, {{'ns_1@10.242.238.90',716},{0,0}}, {{'ns_1@10.242.238.90',717},{0,0}}, {{'ns_1@10.242.238.90',718},{0,0}}, {{'ns_1@10.242.238.90',719},{0,0}}, {{'ns_1@10.242.238.90',720},{0,0}}, {{'ns_1@10.242.238.90',721},{0,0}}, {{'ns_1@10.242.238.90',722},{0,0}}, {{'ns_1@10.242.238.90',723},{0,0}}, {{'ns_1@10.242.238.90',724},{0,0}}, {{'ns_1@10.242.238.90',725},{0,0}}, {{'ns_1@10.242.238.90',726},{0,0}}, {{'ns_1@10.242.238.90',727},{0,0}}, {{'ns_1@10.242.238.90',728},{0,0}}, {{'ns_1@10.242.238.90',729},{0,0}}, {{'ns_1@10.242.238.90',730},{0,0}}, {{'ns_1@10.242.238.90',731},{0,0}}, {{'ns_1@10.242.238.90',732},{0,0}}, {{'ns_1@10.242.238.90',733},{0,0}}, {{'ns_1@10.242.238.90',734},{0,0}}, {{'ns_1@10.242.238.90',735},{0,0}}, {{'ns_1@10.242.238.90',736},{0,0}}, {{'ns_1@10.242.238.90',737},{0,0}}, {{'ns_1@10.242.238.90',738},{0,0}}, {{'ns_1@10.242.238.90',739},{0,0}}, {{'ns_1@10.242.238.90',740},{0,0}}, {{'ns_1@10.242.238.90',741},{0,0}}, {{'ns_1@10.242.238.90',742},{0,0}}, {{'ns_1@10.242.238.90',743},{0,0}}, {{'ns_1@10.242.238.90',744},{0,0}}, {{'ns_1@10.242.238.90',745},{0,0}}, {{'ns_1@10.242.238.90',746},{0,0}}, {{'ns_1@10.242.238.90',747},{0,0}}, {{'ns_1@10.242.238.90',748},{0,0}}, {{'ns_1@10.242.238.90',749},{0,0}}, {{'ns_1@10.242.238.90',750},{0,0}}, {{'ns_1@10.242.238.90',751},{0,0}}, {{'ns_1@10.242.238.90',752},{0,0}}, {{'ns_1@10.242.238.90',753},{0,0}}, {{'ns_1@10.242.238.90',754},{0,0}}, {{'ns_1@10.242.238.90',755},{0,0}}, {{'ns_1@10.242.238.90',756},{0,0}}, {{'ns_1@10.242.238.90',757},{0,0}}, {{'ns_1@10.242.238.90',758},{0,0}}, {{'ns_1@10.242.238.90',759},{0,0}}, {{'ns_1@10.242.238.90',760},{0,0}}, {{'ns_1@10.242.238.90',761},{0,0}}, {{'ns_1@10.242.238.90',762},{0,0}}, {{'ns_1@10.242.238.90',763},{0,0}}, {{'ns_1@10.242.238.90',764},{0,0}}, {{'ns_1@10.242.238.90',765},{0,0}}, {{'ns_1@10.242.238.90',766},{0,0}}, {{'ns_1@10.242.238.90',767},{0,0}}, {{'ns_1@10.242.238.90',938},{0,0}}, {{'ns_1@10.242.238.90',939},{0,0}}, {{'ns_1@10.242.238.90',940},{0,0}}, {{'ns_1@10.242.238.90',941},{0,0}}, {{'ns_1@10.242.238.90',942},{0,0}}, {{'ns_1@10.242.238.90',943},{0,0}}, {{'ns_1@10.242.238.90',944},{0,0}}, {{'ns_1@10.242.238.90',945},{0,0}}, {{'ns_1@10.242.238.90',946},{0,0}}, {{'ns_1@10.242.238.90',947},{0,0}}, {{'ns_1@10.242.238.90',948},{0,0}}, {{'ns_1@10.242.238.90',949},{0,0}}, {{'ns_1@10.242.238.90',950},{0,0}}, {{'ns_1@10.242.238.90',951},{0,0}}, {{'ns_1@10.242.238.90',952},{0,0}}, {{'ns_1@10.242.238.90',953},{0,0}}, {{'ns_1@10.242.238.90',954},{0,0}}, {{'ns_1@10.242.238.90',955},{0,0}}, {{'ns_1@10.242.238.90',956},{0,0}}, {{'ns_1@10.242.238.90',957},{0,0}}, {{'ns_1@10.242.238.90',958},{0,0}}, {{'ns_1@10.242.238.90',959},{0,0}}, {{'ns_1@10.242.238.90',960},{0,0}}, {{'ns_1@10.242.238.90',961},{0,0}}, {{'ns_1@10.242.238.90',962},{0,0}}, {{'ns_1@10.242.238.90',963},{0,0}}, {{'ns_1@10.242.238.90',964},{0,0}}, {{'ns_1@10.242.238.90',965},{0,0}}, {{'ns_1@10.242.238.90',966},{0,0}}, {{'ns_1@10.242.238.90',967},{0,0}}, {{'ns_1@10.242.238.90',968},{0,0}}, {{'ns_1@10.242.238.90',969},{0,0}}, {{'ns_1@10.242.238.90',970},{0,0}}, {{'ns_1@10.242.238.90',971},{0,0}}, {{'ns_1@10.242.238.90',972},{0,0}}, {{'ns_1@10.242.238.90',973},{0,0}}, {{'ns_1@10.242.238.90',974},{0,0}}, {{'ns_1@10.242.238.90',975},{0,0}}, {{'ns_1@10.242.238.90',976},{0,0}}, {{'ns_1@10.242.238.90',977},{0,0}}, {{'ns_1@10.242.238.90',978},{0,0}}, {{'ns_1@10.242.238.90',979},{0,0}}, {{'ns_1@10.242.238.90',980},{0,0}}, {{'ns_1@10.242.238.90',981},{0,0}}, {{'ns_1@10.242.238.90',982},{0,0}}, 
{{'ns_1@10.242.238.90',983},{0,0}}, {{'ns_1@10.242.238.90',984},{0,0}}, {{'ns_1@10.242.238.90',985},{0,0}}, {{'ns_1@10.242.238.90',986},{0,0}}, {{'ns_1@10.242.238.90',987},{0,0}}, {{'ns_1@10.242.238.90',988},{0,0}}, {{'ns_1@10.242.238.90',989},{0,0}}, {{'ns_1@10.242.238.90',990},{0,0}}, {{'ns_1@10.242.238.90',991},{0,0}}, {{'ns_1@10.242.238.90',992},{0,0}}, {{'ns_1@10.242.238.90',993},{0,0}}, {{'ns_1@10.242.238.90',994},{0,0}}, {{'ns_1@10.242.238.90',995},{0,0}}, {{'ns_1@10.242.238.90',996},{0,0}}, {{'ns_1@10.242.238.90',997},{0,0}}, {{'ns_1@10.242.238.90',998},{0,0}}, {{'ns_1@10.242.238.90',999},{0,0}}, {{'ns_1@10.242.238.90',1000},{0,0}}, {{'ns_1@10.242.238.90',1001},{0,0}}, {{'ns_1@10.242.238.90',1002},{0,0}}, {{'ns_1@10.242.238.90',1003},{0,0}}, {{'ns_1@10.242.238.90',1004},{0,0}}, {{'ns_1@10.242.238.90',1005},{0,0}}, {{'ns_1@10.242.238.90',1006},{0,0}}, {{'ns_1@10.242.238.90',1007},{0,0}}, {{'ns_1@10.242.238.90',1008},{0,0}}, {{'ns_1@10.242.238.90',1009},{0,0}}, {{'ns_1@10.242.238.90',1010},{0,0}}, {{'ns_1@10.242.238.90',1011},{0,0}}, {{'ns_1@10.242.238.90',1012},{0,0}}, {{'ns_1@10.242.238.90',1013},{0,0}}, {{'ns_1@10.242.238.90',1014},{0,0}}, {{'ns_1@10.242.238.90',1015},{0,0}}, {{'ns_1@10.242.238.90',1016},{0,0}}, {{'ns_1@10.242.238.90',1017},{0,0}}, {{'ns_1@10.242.238.90',1018},{0,0}}, {{'ns_1@10.242.238.90',1019},{0,0}}, {{'ns_1@10.242.238.90',1020},{0,0}}, {{'ns_1@10.242.238.90',1021},{0,0}}, {{'ns_1@10.242.238.90',1022},{0,0}}, {{'ns_1@10.242.238.90',1023},{0,0}}, {{'ns_1@10.242.238.91',171},{0,0}}, {{'ns_1@10.242.238.91',172},{0,0}}, {{'ns_1@10.242.238.91',173},{0,0}}, {{'ns_1@10.242.238.91',174},{0,0}}, {{'ns_1@10.242.238.91',175},{0,0}}, {{'ns_1@10.242.238.91',176},{0,0}}, {{'ns_1@10.242.238.91',177},{0,0}}, {{'ns_1@10.242.238.91',178},{0,0}}, {{'ns_1@10.242.238.91',179},{0,0}}, {{'ns_1@10.242.238.91',180},{0,0}}, {{'ns_1@10.242.238.91',181},{0,0}}, {{'ns_1@10.242.238.91',182},{0,0}}, {{'ns_1@10.242.238.91',183},{0,0}}, {{'ns_1@10.242.238.91',184},{0,0}}, {{'ns_1@10.242.238.91',185},{0,0}}, {{'ns_1@10.242.238.91',186},{0,0}}, {{'ns_1@10.242.238.91',187},{0,0}}, {{'ns_1@10.242.238.91',188},{0,0}}, {{'ns_1@10.242.238.91',189},{0,0}}, {{'ns_1@10.242.238.91',190},{0,0}}, {{'ns_1@10.242.238.91',191},{0,0}}, {{'ns_1@10.242.238.91',192},{0,0}}, {{'ns_1@10.242.238.91',193},{0,0}}, {{'ns_1@10.242.238.91',194},{0,0}}, {{'ns_1@10.242.238.91',195},{0,0}}, {{'ns_1@10.242.238.91',196},{0,0}}, {{'ns_1@10.242.238.91',197},{0,0}}, {{'ns_1@10.242.238.91',198},{0,0}}, {{'ns_1@10.242.238.91',199},{0,0}}, {{'ns_1@10.242.238.91',200},{0,0}}, {{'ns_1@10.242.238.91',201},{0,0}}, {{'ns_1@10.242.238.91',202},{0,0}}, {{'ns_1@10.242.238.91',203},{0,0}}, {{'ns_1@10.242.238.91',204},{0,0}}, {{'ns_1@10.242.238.91',205},{0,0}}, {{'ns_1@10.242.238.91',206},{0,0}}, {{'ns_1@10.242.238.91',207},{0,0}}, {{'ns_1@10.242.238.91',208},{0,0}}, {{'ns_1@10.242.238.91',209},{0,0}}, {{'ns_1@10.242.238.91',210},{0,0}}, {{'ns_1@10.242.238.91',211},{0,0}}, {{'ns_1@10.242.238.91',212},{0,0}}, {{'ns_1@10.242.238.91',213},{0,0}}, {{'ns_1@10.242.238.91',214},{0,0}}, {{'ns_1@10.242.238.91',215},{0,0}}, {{'ns_1@10.242.238.91',216},{0,0}}, {{'ns_1@10.242.238.91',217},{0,0}}, {{'ns_1@10.242.238.91',218},{0,0}}, {{'ns_1@10.242.238.91',219},{0,0}}, {{'ns_1@10.242.238.91',220},{0,0}}, {{'ns_1@10.242.238.91',221},{0,0}}, {{'ns_1@10.242.238.91',222},{0,0}}, {{'ns_1@10.242.238.91',223},{0,0}}, {{'ns_1@10.242.238.91',224},{0,0}}, {{'ns_1@10.242.238.91',225},{0,0}}, {{'ns_1@10.242.238.91',226},{0,0}}, {{'ns_1@10.242.238.91',227},{0,0}}, 
{{'ns_1@10.242.238.91',228},{0,0}}, {{'ns_1@10.242.238.91',229},{0,0}}, {{'ns_1@10.242.238.91',230},{0,0}}, {{'ns_1@10.242.238.91',231},{0,0}}, {{'ns_1@10.242.238.91',232},{0,0}}, {{'ns_1@10.242.238.91',233},{0,0}}, {{'ns_1@10.242.238.91',234},{0,0}}, {{'ns_1@10.242.238.91',235},{0,0}}, {{'ns_1@10.242.238.91',236},{0,0}}, {{'ns_1@10.242.238.91',237},{0,0}}, {{'ns_1@10.242.238.91',238},{0,0}}, {{'ns_1@10.242.238.91',239},{0,0}}, {{'ns_1@10.242.238.91',240},{0,0}}, {{'ns_1@10.242.238.91',241},{0,0}}, {{'ns_1@10.242.238.91',242},{0,0}}, {{'ns_1@10.242.238.91',243},{0,0}}, {{'ns_1@10.242.238.91',244},{0,0}}, {{'ns_1@10.242.238.91',245},{0,0}}, {{'ns_1@10.242.238.91',246},{0,0}}, {{'ns_1@10.242.238.91',247},{0,0}}, {{'ns_1@10.242.238.91',248},{0,0}}, {{'ns_1@10.242.238.91',249},{0,0}}, {{'ns_1@10.242.238.91',250},{0,0}}, {{'ns_1@10.242.238.91',251},{0,0}}, {{'ns_1@10.242.238.91',252},{0,0}}, {{'ns_1@10.242.238.91',253},{0,0}}, {{'ns_1@10.242.238.91',254},{0,0}}, {{'ns_1@10.242.238.91',255},{0,0}}, {{'ns_1@10.242.238.91',427},{0,0}}, {{'ns_1@10.242.238.91',428},{0,0}}, {{'ns_1@10.242.238.91',429},{0,0}}, {{'ns_1@10.242.238.91',430},{0,0}}, {{'ns_1@10.242.238.91',431},{0,0}}, {{'ns_1@10.242.238.91',432},{0,0}}, {{'ns_1@10.242.238.91',433},{0,0}}, {{'ns_1@10.242.238.91',434},{0,0}}, {{'ns_1@10.242.238.91',435},{0,0}}, {{'ns_1@10.242.238.91',436},{0,0}}, {{'ns_1@10.242.238.91',437},{0,0}}, {{'ns_1@10.242.238.91',438},{0,0}}, {{'ns_1@10.242.238.91',439},{0,0}}, {{'ns_1@10.242.238.91',440},{0,0}}, {{'ns_1@10.242.238.91',441},{0,0}}, {{'ns_1@10.242.238.91',442},{0,0}}, {{'ns_1@10.242.238.91',443},{0,0}}, {{'ns_1@10.242.238.91',444},{0,0}}, {{'ns_1@10.242.238.91',445},{0,0}}, {{'ns_1@10.242.238.91',446},{0,0}}, {{'ns_1@10.242.238.91',447},{0,0}}, {{'ns_1@10.242.238.91',448},{0,0}}, {{'ns_1@10.242.238.91',449},{0,0}}, {{'ns_1@10.242.238.91',450},{0,0}}, {{'ns_1@10.242.238.91',451},{0,0}}, {{'ns_1@10.242.238.91',452},{0,0}}, {{'ns_1@10.242.238.91',453},{0,0}}, {{'ns_1@10.242.238.91',454},{0,0}}, {{'ns_1@10.242.238.91',455},{0,0}}, {{'ns_1@10.242.238.91',456},{0,0}}, {{'ns_1@10.242.238.91',457},{0,0}}, {{'ns_1@10.242.238.91',458},{0,0}}, {{'ns_1@10.242.238.91',459},{0,0}}, {{'ns_1@10.242.238.91',460},{0,0}}, {{'ns_1@10.242.238.91',461},{0,0}}, {{'ns_1@10.242.238.91',462},{0,0}}, {{'ns_1@10.242.238.91',463},{0,0}}, {{'ns_1@10.242.238.91',464},{0,0}}, {{'ns_1@10.242.238.91',465},{0,0}}, {{'ns_1@10.242.238.91',466},{0,0}}, {{'ns_1@10.242.238.91',467},{0,0}}, {{'ns_1@10.242.238.91',468},{0,0}}, {{'ns_1@10.242.238.91',469},{0,0}}, {{'ns_1@10.242.238.91',470},{0,0}}, {{'ns_1@10.242.238.91',471},{0,0}}, {{'ns_1@10.242.238.91',472},{0,0}}, {{'ns_1@10.242.238.91',473},{0,0}}, {{'ns_1@10.242.238.91',474},{0,0}}, {{'ns_1@10.242.238.91',475},{0,0}}, {{'ns_1@10.242.238.91',476},{0,0}}, {{'ns_1@10.242.238.91',477},{0,0}}, {{'ns_1@10.242.238.91',478},{0,0}}, {{'ns_1@10.242.238.91',479},{0,0}}, {{'ns_1@10.242.238.91',480},{0,0}}, {{'ns_1@10.242.238.91',481},{0,0}}, {{'ns_1@10.242.238.91',482},{0,0}}, {{'ns_1@10.242.238.91',483},{0,0}}, {{'ns_1@10.242.238.91',484},{0,0}}, {{'ns_1@10.242.238.91',485},{0,0}}, {{'ns_1@10.242.238.91',486},{0,0}}, {{'ns_1@10.242.238.91',487},{0,0}}, {{'ns_1@10.242.238.91',488},{0,0}}, {{'ns_1@10.242.238.91',489},{0,0}}, {{'ns_1@10.242.238.91',490},{0,0}}, {{'ns_1@10.242.238.91',491},{0,0}}, {{'ns_1@10.242.238.91',492},{0,0}}, {{'ns_1@10.242.238.91',493},{0,0}}, {{'ns_1@10.242.238.91',494},{0,0}}, {{'ns_1@10.242.238.91',495},{0,0}}, {{'ns_1@10.242.238.91',496},{0,0}}, 
{{'ns_1@10.242.238.91',497},{0,0}}, {{'ns_1@10.242.238.91',498},{0,0}}, {{'ns_1@10.242.238.91',499},{0,0}}, {{'ns_1@10.242.238.91',500},{0,0}}, {{'ns_1@10.242.238.91',501},{0,0}}, {{'ns_1@10.242.238.91',502},{0,0}}, {{'ns_1@10.242.238.91',503},{0,0}}, {{'ns_1@10.242.238.91',504},{0,0}}, {{'ns_1@10.242.238.91',505},{0,0}}, {{'ns_1@10.242.238.91',506},{0,0}}, {{'ns_1@10.242.238.91',507},{0,0}}, {{'ns_1@10.242.238.91',508},{0,0}}, {{'ns_1@10.242.238.91',509},{0,0}}, {{'ns_1@10.242.238.91',510},{0,0}}, {{'ns_1@10.242.238.91',511},{0,0}}, {{'ns_1@10.242.238.91',682},{0,0}}, {{'ns_1@10.242.238.91',683},{0,0}}, {{'ns_1@10.242.238.91',684},{0,0}}, {{'ns_1@10.242.238.91',685},{0,0}}, {{'ns_1@10.242.238.91',686},{0,0}}, {{'ns_1@10.242.238.91',687},{0,0}}, {{'ns_1@10.242.238.91',688},{0,0}}, {{'ns_1@10.242.238.91',689},{0,0}}, {{'ns_1@10.242.238.91',690},{0,0}}, {{'ns_1@10.242.238.91',691},{0,0}}, {{'ns_1@10.242.238.91',692},{0,0}}, {{'ns_1@10.242.238.91',693},{0,0}}, {{'ns_1@10.242.238.91',694},{0,0}}, {{'ns_1@10.242.238.91',695},{0,0}}, {{'ns_1@10.242.238.91',696},{0,0}}, {{'ns_1@10.242.238.91',697},{0,0}}, {{'ns_1@10.242.238.91',698},{0,0}}, {{'ns_1@10.242.238.91',699},{0,0}}, {{'ns_1@10.242.238.91',700},{0,0}}, {{'ns_1@10.242.238.91',701},{0,0}}, {{'ns_1@10.242.238.91',702},{0,0}}, {{'ns_1@10.242.238.91',703},{0,0}}, {{'ns_1@10.242.238.91',704},{0,0}}, {{'ns_1@10.242.238.91',705},{0,0}}, {{'ns_1@10.242.238.91',706},{0,0}}, {{'ns_1@10.242.238.91',707},{0,0}}, {{'ns_1@10.242.238.91',708},{0,0}}, {{'ns_1@10.242.238.91',709},{0,0}}, {{'ns_1@10.242.238.91',710},{0,0}}, {{'ns_1@10.242.238.91',711},{0,0}}, {{'ns_1@10.242.238.91',712},{0,0}}, {{'ns_1@10.242.238.91',713},{0,0}}, {{'ns_1@10.242.238.91',714},{0,0}}, {{'ns_1@10.242.238.91',715},{0,0}}, {{'ns_1@10.242.238.91',716},{0,0}}, {{'ns_1@10.242.238.91',717},{0,0}}, {{'ns_1@10.242.238.91',718},{0,0}}, {{'ns_1@10.242.238.91',719},{0,0}}, {{'ns_1@10.242.238.91',720},{0,0}}, {{'ns_1@10.242.238.91',721},{0,0}}, {{'ns_1@10.242.238.91',722},{0,0}}, {{'ns_1@10.242.238.91',723},{0,0}}, {{'ns_1@10.242.238.91',724},{0,0}}, {{'ns_1@10.242.238.91',725},{0,0}}, {{'ns_1@10.242.238.91',726},{0,0}}, {{'ns_1@10.242.238.91',727},{0,0}}, {{'ns_1@10.242.238.91',728},{0,0}}, {{'ns_1@10.242.238.91',729},{0,0}}, {{'ns_1@10.242.238.91',730},{0,0}}, {{'ns_1@10.242.238.91',731},{0,0}}, {{'ns_1@10.242.238.91',732},{0,0}}, {{'ns_1@10.242.238.91',733},{0,0}}, {{'ns_1@10.242.238.91',734},{0,0}}, {{'ns_1@10.242.238.91',735},{0,0}}, {{'ns_1@10.242.238.91',736},{0,0}}, {{'ns_1@10.242.238.91',737},{0,0}}, {{'ns_1@10.242.238.91',738},{0,0}}, {{'ns_1@10.242.238.91',739},{0,0}}, {{'ns_1@10.242.238.91',740},{0,0}}, {{'ns_1@10.242.238.91',741},{0,0}}, {{'ns_1@10.242.238.91',742},{0,0}}, {{'ns_1@10.242.238.91',743},{0,0}}, {{'ns_1@10.242.238.91',744},{0,0}}, {{'ns_1@10.242.238.91',745},{0,0}}, {{'ns_1@10.242.238.91',746},{0,0}}, {{'ns_1@10.242.238.91',747},{0,0}}, {{'ns_1@10.242.238.91',748},{0,0}}, {{'ns_1@10.242.238.91',749},{0,0}}, {{'ns_1@10.242.238.91',750},{0,0}}, {{'ns_1@10.242.238.91',751},{0,0}}, {{'ns_1@10.242.238.91',752},{0,0}}, {{'ns_1@10.242.238.91',753},{0,0}}, {{'ns_1@10.242.238.91',754},{0,0}}, {{'ns_1@10.242.238.91',755},{0,0}}, {{'ns_1@10.242.238.91',756},{0,0}}, {{'ns_1@10.242.238.91',757},{0,0}}, {{'ns_1@10.242.238.91',758},{0,0}}, {{'ns_1@10.242.238.91',759},{0,0}}, {{'ns_1@10.242.238.91',760},{0,0}}, {{'ns_1@10.242.238.91',761},{0,0}}, {{'ns_1@10.242.238.91',762},{0,0}}, {{'ns_1@10.242.238.91',763},{0,0}}, {{'ns_1@10.242.238.91',764},{0,0}}, 
{{'ns_1@10.242.238.91',765},{0,0}}, {{'ns_1@10.242.238.91',766},{0,0}}, {{'ns_1@10.242.238.91',767},{0,0}}, {{'ns_1@10.242.238.91',768},{0,0}}, {{'ns_1@10.242.238.91',769},{0,0}}, {{'ns_1@10.242.238.91',770},{0,0}}, {{'ns_1@10.242.238.91',771},{0,0}}, {{'ns_1@10.242.238.91',772},{0,0}}, {{'ns_1@10.242.238.91',773},{0,0}}, {{'ns_1@10.242.238.91',774},{0,0}}, {{'ns_1@10.242.238.91',775},{0,0}}, {{'ns_1@10.242.238.91',776},{0,0}}, {{'ns_1@10.242.238.91',777},{0,0}}, {{'ns_1@10.242.238.91',778},{0,0}}, {{'ns_1@10.242.238.91',779},{0,0}}, {{'ns_1@10.242.238.91',780},{0,0}}, {{'ns_1@10.242.238.91',781},{0,0}}, {{'ns_1@10.242.238.91',782},{0,0}}, {{'ns_1@10.242.238.91',783},{0,0}}, {{'ns_1@10.242.238.91',784},{0,0}}, {{'ns_1@10.242.238.91',785},{0,0}}, {{'ns_1@10.242.238.91',786},{0,0}}, {{'ns_1@10.242.238.91',787},{0,0}}, {{'ns_1@10.242.238.91',788},{0,0}}, {{'ns_1@10.242.238.91',789},{0,0}}, {{'ns_1@10.242.238.91',790},{0,0}}, {{'ns_1@10.242.238.91',791},{0,0}}, {{'ns_1@10.242.238.91',792},{0,0}}, {{'ns_1@10.242.238.91',793},{0,0}}, {{'ns_1@10.242.238.91',794},{0,0}}, {{'ns_1@10.242.238.91',795},{0,0}}, {{'ns_1@10.242.238.91',796},{0,0}}, {{'ns_1@10.242.238.91',797},{0,0}}, {{'ns_1@10.242.238.91',798},{0,0}}, {{'ns_1@10.242.238.91',799},{0,0}}, {{'ns_1@10.242.238.91',800},{0,0}}, {{'ns_1@10.242.238.91',801},{0,0}}, {{'ns_1@10.242.238.91',802},{0,0}}, {{'ns_1@10.242.238.91',803},{0,0}}, {{'ns_1@10.242.238.91',804},{0,0}}, {{'ns_1@10.242.238.91',805},{0,0}}, {{'ns_1@10.242.238.91',806},{0,0}}, {{'ns_1@10.242.238.91',807},{0,0}}, {{'ns_1@10.242.238.91',808},{0,0}}, {{'ns_1@10.242.238.91',809},{0,0}}, {{'ns_1@10.242.238.91',810},{0,0}}, {{'ns_1@10.242.238.91',811},{0,0}}, {{'ns_1@10.242.238.91',812},{0,0}}, {{'ns_1@10.242.238.91',813},{0,0}}, {{'ns_1@10.242.238.91',814},{0,0}}, {{'ns_1@10.242.238.91',815},{0,0}}, {{'ns_1@10.242.238.91',816},{0,0}}, {{'ns_1@10.242.238.91',817},{0,0}}, {{'ns_1@10.242.238.91',818},{0,0}}, {{'ns_1@10.242.238.91',819},{0,0}}, {{'ns_1@10.242.238.91',820},{0,0}}, {{'ns_1@10.242.238.91',821},{0,0}}, {{'ns_1@10.242.238.91',822},{0,0}}, {{'ns_1@10.242.238.91',823},{0,0}}, {{'ns_1@10.242.238.91',824},{0,0}}, {{'ns_1@10.242.238.91',825},{0,0}}, {{'ns_1@10.242.238.91',826},{0,0}}, {{'ns_1@10.242.238.91',827},{0,0}}, {{'ns_1@10.242.238.91',828},{0,0}}, {{'ns_1@10.242.238.91',829},{0,0}}, {{'ns_1@10.242.238.91',830},{0,0}}, {{'ns_1@10.242.238.91',831},{0,0}}, {{'ns_1@10.242.238.91',832},{0,0}}, {{'ns_1@10.242.238.91',833},{0,0}}, {{'ns_1@10.242.238.91',834},{0,0}}, {{'ns_1@10.242.238.91',835},{0,0}}, {{'ns_1@10.242.238.91',836},{0,0}}, {{'ns_1@10.242.238.91',837},{0,0}}, {{'ns_1@10.242.238.91',838},{0,0}}, {{'ns_1@10.242.238.91',839},{0,0}}, {{'ns_1@10.242.238.91',840},{0,0}}, {{'ns_1@10.242.238.91',841},{0,0}}, {{'ns_1@10.242.238.91',842},{0,0}}, {{'ns_1@10.242.238.91',843},{0,0}}, {{'ns_1@10.242.238.91',844},{0,0}}, {{'ns_1@10.242.238.91',845},{0,0}}, {{'ns_1@10.242.238.91',846},{0,0}}, {{'ns_1@10.242.238.91',847},{0,0}}, {{'ns_1@10.242.238.91',848},{0,0}}, {{'ns_1@10.242.238.91',849},{0,0}}, {{'ns_1@10.242.238.91',850},{0,0}}, {{'ns_1@10.242.238.91',851},{0,0}}, {{'ns_1@10.242.238.91',852},{0,0}}, {{'ns_1@10.242.238.91',853},{0,0}}, {{'ns_1@10.242.238.91',854},{0,0}}, {{'ns_1@10.242.238.91',855},{0,0}}, {{'ns_1@10.242.238.91',856},{0,0}}, {{'ns_1@10.242.238.91',857},{0,0}}, {{'ns_1@10.242.238.91',858},{0,0}}, {{'ns_1@10.242.238.91',859},{0,0}}, {{'ns_1@10.242.238.91',860},{0,0}}, {{'ns_1@10.242.238.91',861},{0,0}}, {{'ns_1@10.242.238.91',862},{0,0}}, 
{{'ns_1@10.242.238.91',863},{0,0}}, {{'ns_1@10.242.238.91',864},{0,0}}, {{'ns_1@10.242.238.91',865},{0,0}}, {{'ns_1@10.242.238.91',866},{0,0}}, {{'ns_1@10.242.238.91',867},{0,0}}, {{'ns_1@10.242.238.91',868},{0,0}}, {{'ns_1@10.242.238.91',869},{0,0}}, {{'ns_1@10.242.238.91',870},{0,0}}, {{'ns_1@10.242.238.91',871},{0,0}}, {{'ns_1@10.242.238.91',872},{0,0}}, {{'ns_1@10.242.238.91',873},{0,0}}, {{'ns_1@10.242.238.91',874},{0,0}}, {{'ns_1@10.242.238.91',875},{0,0}}, {{'ns_1@10.242.238.91',876},{0,0}}, {{'ns_1@10.242.238.91',877},{0,0}}, {{'ns_1@10.242.238.91',878},{0,0}}, {{'ns_1@10.242.238.91',879},{0,0}}, {{'ns_1@10.242.238.91',880},{0,0}}, {{'ns_1@10.242.238.91',881},{0,0}}, {{'ns_1@10.242.238.91',882},{0,0}}, {{'ns_1@10.242.238.91',883},{0,0}}, {{'ns_1@10.242.238.91',884},{0,0}}, {{'ns_1@10.242.238.91',885},{0,0}}, {{'ns_1@10.242.238.91',886},{0,0}}, {{'ns_1@10.242.238.91',887},{0,0}}, {{'ns_1@10.242.238.91',888},{0,0}}, {{'ns_1@10.242.238.91',889},{0,0}}, {{'ns_1@10.242.238.91',890},{0,0}}, {{'ns_1@10.242.238.91',891},{0,0}}, {{'ns_1@10.242.238.91',892},{0,0}}, {{'ns_1@10.242.238.91',893},{0,0}}, {{'ns_1@10.242.238.91',894},{0,0}}, {{'ns_1@10.242.238.91',895},{0,0}}, {{'ns_1@10.242.238.91',896},{0,0}}, {{'ns_1@10.242.238.91',897},{0,0}}, {{'ns_1@10.242.238.91',898},{0,0}}, {{'ns_1@10.242.238.91',899},{0,0}}, {{'ns_1@10.242.238.91',900},{0,0}}, {{'ns_1@10.242.238.91',901},{0,0}}, {{'ns_1@10.242.238.91',902},{0,0}}, {{'ns_1@10.242.238.91',903},{0,0}}, {{'ns_1@10.242.238.91',904},{0,0}}, {{'ns_1@10.242.238.91',905},{0,0}}, {{'ns_1@10.242.238.91',906},{0,0}}, {{'ns_1@10.242.238.91',907},{0,0}}, {{'ns_1@10.242.238.91',908},{0,0}}, {{'ns_1@10.242.238.91',909},{0,0}}, {{'ns_1@10.242.238.91',910},{0,0}}, {{'ns_1@10.242.238.91',911},{0,0}}, {{'ns_1@10.242.238.91',912},{0,0}}, {{'ns_1@10.242.238.91',913},{0,0}}, {{'ns_1@10.242.238.91',914},{0,0}}, {{'ns_1@10.242.238.91',915},{0,0}}, {{'ns_1@10.242.238.91',916},{0,0}}, {{'ns_1@10.242.238.91',917},{0,0}}, {{'ns_1@10.242.238.91',918},{0,0}}, {{'ns_1@10.242.238.91',919},{0,0}}, {{'ns_1@10.242.238.91',920},{0,0}}, {{'ns_1@10.242.238.91',921},{0,0}}, {{'ns_1@10.242.238.91',922},{0,0}}, {{'ns_1@10.242.238.91',923},{0,0}}, {{'ns_1@10.242.238.91',924},{0,0}}, {{'ns_1@10.242.238.91',925},{0,0}}, {{'ns_1@10.242.238.91',926},{0,0}}, {{'ns_1@10.242.238.91',927},{0,0}}, {{'ns_1@10.242.238.91',928},{0,0}}, {{'ns_1@10.242.238.91',929},{0,0}}, {{'ns_1@10.242.238.91',930},{0,0}}, {{'ns_1@10.242.238.91',931},{0,0}}, {{'ns_1@10.242.238.91',932},{0,0}}, {{'ns_1@10.242.238.91',933},{0,0}}, {{'ns_1@10.242.238.91',934},{0,0}}, {{'ns_1@10.242.238.91',935},{0,0}}, {{'ns_1@10.242.238.91',936},{0,0}}, {{'ns_1@10.242.238.91',937},{0,0}}, {{'ns_1@10.242.238.91',938},{0,0}}, {{'ns_1@10.242.238.91',939},{0,0}}, {{'ns_1@10.242.238.91',940},{0,0}}, {{'ns_1@10.242.238.91',941},{0,0}}, {{'ns_1@10.242.238.91',942},{0,0}}, {{'ns_1@10.242.238.91',943},{0,0}}, {{'ns_1@10.242.238.91',944},{0,0}}, {{'ns_1@10.242.238.91',945},{0,0}}, {{'ns_1@10.242.238.91',946},{0,0}}, {{'ns_1@10.242.238.91',947},{0,0}}, {{'ns_1@10.242.238.91',948},{0,0}}, {{'ns_1@10.242.238.91',949},{0,0}}, {{'ns_1@10.242.238.91',950},{0,0}}, {{'ns_1@10.242.238.91',951},{0,0}}, {{'ns_1@10.242.238.91',952},{0,0}}, {{'ns_1@10.242.238.91',953},{0,0}}, {{'ns_1@10.242.238.91',954},{0,0}}, {{'ns_1@10.242.238.91',955},{0,0}}, {{'ns_1@10.242.238.91',956},{0,0}}, {{'ns_1@10.242.238.91',957},{0,0}}, {{'ns_1@10.242.238.91',958},{0,0}}, {{'ns_1@10.242.238.91',959},{0,0}}, {{'ns_1@10.242.238.91',960},{0,0}}, 
{{'ns_1@10.242.238.91',961},{0,0}}, {{'ns_1@10.242.238.91',962},{0,0}}, {{'ns_1@10.242.238.91',963},{0,0}}, {{'ns_1@10.242.238.91',964},{0,0}}, {{'ns_1@10.242.238.91',965},{0,0}}, {{'ns_1@10.242.238.91',966},{0,0}}, {{'ns_1@10.242.238.91',967},{0,0}}, {{'ns_1@10.242.238.91',968},{0,0}}, {{'ns_1@10.242.238.91',969},{0,0}}, {{'ns_1@10.242.238.91',970},{0,0}}, {{'ns_1@10.242.238.91',971},{0,0}}, {{'ns_1@10.242.238.91',972},{0,0}}, {{'ns_1@10.242.238.91',973},{0,0}}, {{'ns_1@10.242.238.91',974},{0,0}}, {{'ns_1@10.242.238.91',975},{0,0}}, {{'ns_1@10.242.238.91',976},{0,0}}, {{'ns_1@10.242.238.91',977},{0,0}}, {{'ns_1@10.242.238.91',978},{0,0}}, {{'ns_1@10.242.238.91',979},{0,0}}, {{'ns_1@10.242.238.91',980},{0,0}}, {{'ns_1@10.242.238.91',981},{0,0}}, {{'ns_1@10.242.238.91',982},{0,0}}, {{'ns_1@10.242.238.91',983},{0,0}}, {{'ns_1@10.242.238.91',984},{0,0}}, {{'ns_1@10.242.238.91',985},{0,0}}, {{'ns_1@10.242.238.91',986},{0,0}}, {{'ns_1@10.242.238.91',987},{0,0}}, {{'ns_1@10.242.238.91',988},{0,0}}, {{'ns_1@10.242.238.91',989},{0,0}}, {{'ns_1@10.242.238.91',990},{0,0}}, {{'ns_1@10.242.238.91',991},{0,0}}, {{'ns_1@10.242.238.91',992},{0,0}}, {{'ns_1@10.242.238.91',993},{0,0}}, {{'ns_1@10.242.238.91',994},{0,0}}, {{'ns_1@10.242.238.91',995},{0,0}}, {{'ns_1@10.242.238.91',996},{0,0}}, {{'ns_1@10.242.238.91',997},{0,0}}, {{'ns_1@10.242.238.91',998},{0,0}}, {{'ns_1@10.242.238.91',999},{0,0}}, {{'ns_1@10.242.238.91',1000},{0,0}}, {{'ns_1@10.242.238.91',1001},{0,0}}, {{'ns_1@10.242.238.91',1002},{0,0}}, {{'ns_1@10.242.238.91',1003},{0,0}}, {{'ns_1@10.242.238.91',1004},{0,0}}, {{'ns_1@10.242.238.91',1005},{0,0}}, {{'ns_1@10.242.238.91',1006},{0,0}}, {{'ns_1@10.242.238.91',1007},{0,0}}, {{'ns_1@10.242.238.91',1008},{0,0}}, {{'ns_1@10.242.238.91',1009},{0,0}}, {{'ns_1@10.242.238.91',1010},{0,0}}, {{'ns_1@10.242.238.91',1011},{0,0}}, {{'ns_1@10.242.238.91',1012},{0,0}}, {{'ns_1@10.242.238.91',1013},{0,0}}, {{'ns_1@10.242.238.91',1014},{0,0}}, {{'ns_1@10.242.238.91',1015},{0,0}}, {{'ns_1@10.242.238.91',1016},{0,0}}, {{'ns_1@10.242.238.91',1017},{0,0}}, {{'ns_1@10.242.238.91',1018},{0,0}}, {{'ns_1@10.242.238.91',1019},{0,0}}, {{'ns_1@10.242.238.91',1020},{0,0}}, {{'ns_1@10.242.238.91',1021},{0,0}}, {{'ns_1@10.242.238.91',1022},{0,0}}, {{'ns_1@10.242.238.91',1023},{0,0}}] [ns_server:debug,2014-08-19T16:49:34.320,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 172. Nacking mccouch update. [views:debug,2014-08-19T16:49:34.321,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/172. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:34.321,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",172,active,0} [ns_server:debug,2014-08-19T16:49:34.323,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,656, 592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864, 800,498,434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280, 706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186, 978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394,330, 266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538, 236,172,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508, 444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326, 262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534, 232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648, 584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426, 298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842,476, 348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892,398, 270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214,942, 814,448,320,682,554,188,916,788,422,294] [views:debug,2014-08-19T16:49:34.372,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/172. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:34.372,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",172,active,0} [ns_server:debug,2014-08-19T16:49:34.409,ns_1@10.242.238.88:<0.25804.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.18847.0>}, {'ns_1@10.242.238.91',<18126.18802.0>}]) [rebalance:info,2014-08-19T16:49:34.409,ns_1@10.242.238.88:<0.25795.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:34.410,ns_1@10.242.238.88:<0.25795.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 767 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:34.410,ns_1@10.242.238.88:<0.25795.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:34.411,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:34.419,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:34.419,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.25816.0>) [ns_server:debug,2014-08-19T16:49:34.420,ns_1@10.242.238.88:<0.25817.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [rebalance:info,2014-08-19T16:49:34.420,ns_1@10.242.238.88:<0.25816.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 511 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:34.420,ns_1@10.242.238.88:<0.25822.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 511 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:34.420,ns_1@10.242.238.88:<0.25823.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 511 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:34.426,ns_1@10.242.238.88:<0.25824.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 511 into 'ns_1@10.242.238.91' is <18126.18807.0> [ns_server:debug,2014-08-19T16:49:34.434,ns_1@10.242.238.88:<0.25824.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 511 into 'ns_1@10.242.238.89' is <18124.24991.0> [rebalance:debug,2014-08-19T16:49:34.434,ns_1@10.242.238.88:<0.25816.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 511 is <0.25824.0> [ns_server:debug,2014-08-19T16:49:34.531,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 170. Nacking mccouch update. [views:debug,2014-08-19T16:49:34.531,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/170. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:34.531,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",170,active,0} [ns_server:debug,2014-08-19T16:49:34.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,656, 592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864, 800,498,434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280, 706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186, 978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394,330, 266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538, 236,172,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508, 444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712, 648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792, 426,298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842, 476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,892, 398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580,214, 942,814,448,320,682,554,188,916,788,422,294] [ns_server:debug,2014-08-19T16:49:34.536,ns_1@10.242.238.88:<0.25825.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.24991.0>}, {'ns_1@10.242.238.91',<18126.18807.0>}]) [rebalance:info,2014-08-19T16:49:34.537,ns_1@10.242.238.88:<0.25816.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:34.537,ns_1@10.242.238.88:<0.25816.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 511 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:34.538,ns_1@10.242.238.88:<0.25816.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:34.538,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:34.543,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:34.543,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1022, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.25851.0>) [ns_server:debug,2014-08-19T16:49:34.544,ns_1@10.242.238.88:<0.25852.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:34.544,ns_1@10.242.238.88:<0.25852.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:34.544,ns_1@10.242.238.88:<0.25851.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1022 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:34.544,ns_1@10.242.238.88:<0.25857.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1022 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:34.545,ns_1@10.242.238.88:<0.25858.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1022 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:34.551,ns_1@10.242.238.88:<0.25859.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1022 into 'ns_1@10.242.238.90' is <18125.18867.0> [ns_server:debug,2014-08-19T16:49:34.553,ns_1@10.242.238.88:<0.25859.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1022 into 'ns_1@10.242.238.91' is <18126.18818.0> [rebalance:debug,2014-08-19T16:49:34.553,ns_1@10.242.238.88:<0.25851.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1022 is <0.25859.0> [ns_server:debug,2014-08-19T16:49:34.536,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:initiate_bucket_rebalance:232]Moves: [{move_state,0, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,1, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,2, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,3, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,4, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,5, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,6, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,7, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,8, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,9, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,10, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,11, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,12, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,13, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,14, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,15, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,16, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,17, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,18, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,19, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,20, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,21, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,22, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,23, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,24, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,25, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,26, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,27, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,28, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,29, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,30, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,31, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,32, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,33, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,34, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,35, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,36, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,37, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,38, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,39, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,40, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,41, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,42, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,43, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,44, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,45, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,46, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,47, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,48, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,49, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,50, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,51, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,52, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,53, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,54, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,55, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,56, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,57, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,58, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,59, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,60, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,61, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,62, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,63, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,64, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,65, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,66, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,67, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,68, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,69, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,70, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,71, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,72, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,73, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,74, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,75, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,76, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,77, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,78, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,79, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,80, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,81, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,82, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,83, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,84, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,85, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,86, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,87, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,88, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,89, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,90, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,91, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,92, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,93, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,94, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,95, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,96, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,97, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,98, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,99, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,100, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,101, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,102, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,103, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,104, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,105, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,106, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,107, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,108, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,109, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,110, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,111, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,112, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,113, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,114, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,115, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,116, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,117, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,118, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,119, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,120, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,121, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,122, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,123, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,124, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,125, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, 
{move_state,126, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,127, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,128, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,129, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,130, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,131, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,132, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,133, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,134, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,135, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,136, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,137, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,138, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,139, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,140, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,141, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,142, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,143, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,144, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,145, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,146, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,147, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,148, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,149, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,150, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,151, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,152, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,153, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,154, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,155, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,156, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,157, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,158, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,159, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,160, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,161, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,162, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,163, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,164, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,165, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,166, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,167, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,168, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,169, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,170, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,171, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,172, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,173, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,174, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,175, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,176, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,177, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,178, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,179, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,180, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,181, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,182, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,183, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,184, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,185, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,186, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,187, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,188, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,189, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,190, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,191, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,192, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,193, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,194, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,195, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,196, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,197, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,198, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,199, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,200, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,201, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,202, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,203, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,204, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,205, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,206, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,207, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,208, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,209, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,210, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,211, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,212, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,213, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,214, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,215, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,216, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, 
{move_state,217, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,218, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,219, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,220, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,221, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,222, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,223, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,224, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,225, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,226, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,227, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,228, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,229, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,230, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,231, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,232, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,233, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,234, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,235, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,236, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,237, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,238, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,239, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,240, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,241, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,242, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,243, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,244, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,245, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,246, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,247, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,248, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,249, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,250, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,251, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,252, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,253, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,254, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,255, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.88','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,256, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,257, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,258, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,259, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,260, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,261, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,262, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,263, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,264, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,265, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,266, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,267, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,268, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,269, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,270, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,271, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,272, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,273, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,274, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,275, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,276, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,277, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,278, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,279, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,280, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,281, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,282, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,283, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,284, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,285, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,286, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,287, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,288, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,289, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,290, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,291, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,292, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,293, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,294, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,295, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,296, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,297, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,298, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,299, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,300, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,301, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,302, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,303, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,304, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,305, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,306, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,307, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, 
{move_state,308, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,309, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,310, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,311, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,312, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,313, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,314, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,315, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,316, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,317, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,318, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,319, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,320, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,321, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,322, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,323, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,324, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,325, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,326, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,327, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,328, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,329, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,330, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,331, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,332, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,333, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,334, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,335, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,336, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,337, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,338, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,339, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,340, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,341, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,342, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,343, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,344, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,345, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,346, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,347, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,348, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,349, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,350, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,351, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,352, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,353, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,354, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,355, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,356, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,357, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,358, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,359, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,360, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,361, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,362, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,367, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,384, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, 
{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,434, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,451, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, 
{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,501, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,512, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,513, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,514, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,515, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,516, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,517, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,518, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,519, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,520, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,521, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,522, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,523, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,524, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,525, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,526, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,527, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,528, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,529, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,530, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,531, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,532, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,533, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,534, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,535, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,536, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,537, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,538, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,539, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,540, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,541, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,542, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, 
{move_state,543, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,544, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,545, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,546, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,547, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,548, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,549, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,550, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,551, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,552, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,553, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,554, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,555, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,556, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,557, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,558, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,559, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,560, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,561, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,562, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,563, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,564, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,565, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,566, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,567, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,568, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,569, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,570, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,571, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,572, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,573, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,574, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,575, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,576, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,577, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,578, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,579, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,580, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,581, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,582, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,583, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,584, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,585, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,586, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,587, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,588, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,589, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,590, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,591, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,592, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,593, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,594, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,595, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,596, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,597, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,598, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,599, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,600, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,601, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,602, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,603, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,604, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,605, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,606, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,607, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,608, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,609, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,610, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,611, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,612, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,613, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,614, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,615, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,616, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,617, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,618, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,624, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,641, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, 
{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,691, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,708, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, 
{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,758, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,768, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,769, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,770, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,771, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,772, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,773, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,774, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,775, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,776, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,777, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,778, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,779, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,780, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,781, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,782, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,783, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,784, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,785, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,786, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,787, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,788, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,789, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,790, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,791, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,792, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,793, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,794, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,795, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,796, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,797, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,798, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,799, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,800, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,801, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,802, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,803, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,804, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,805, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,806, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,807, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,808, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,809, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,810, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,811, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,812, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,813, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,814, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,815, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,816, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,817, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,818, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,819, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,820, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,821, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,822, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,823, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,824, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,825, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,826, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,827, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,828, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,829, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,830, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,831, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,832, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,833, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,834, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,835, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,836, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,837, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,838, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,839, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,840, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,841, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,842, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,843, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,844, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,845, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, 
{move_state,846, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,847, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,848, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,849, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,850, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,851, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,852, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.88'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,853, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,854, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,855, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,856, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,857, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,858, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,859, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,860, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,861, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,862, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,863, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,864, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,865, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,866, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,867, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,868, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,869, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,870, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,871, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,872, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,873, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,881, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,898, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, 
{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,948, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,965, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, 
{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1015, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}]
[ns_server:debug,2014-08-19T16:49:34.587,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1023)
[ns_server:debug,2014-08-19T16:49:34.588,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,257683}, tap_estimate, {replica_building,"default",1023,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18828.0>, <<"replication_building_1023_'ns_1@10.242.238.90'">>}
[ns_server:debug,2014-08-19T16:49:34.588,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,276910}, tap_estimate, {replica_building,"default",1023,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18782.0>, <<"replication_building_1023_'ns_1@10.242.238.91'">>}
[ns_server:debug,2014-08-19T16:49:34.588,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 767)
[ns_server:debug,2014-08-19T16:49:34.595,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,382415}, tap_estimate, {replica_building,"default",767,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18802.0>, <<"replication_building_767_'ns_1@10.242.238.91'">>}
[ns_server:debug,2014-08-19T16:49:34.596,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,399794}, tap_estimate, {replica_building,"default",767,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18847.0>, <<"replication_building_767_'ns_1@10.242.238.90'">>}
[ns_server:debug,2014-08-19T16:49:34.596,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 511)
[ns_server:debug,2014-08-19T16:49:34.596,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,510050}, tap_estimate, {replica_building,"default",511,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18807.0>, <<"replication_building_511_'ns_1@10.242.238.91'">>}
[ns_server:debug,2014-08-19T16:49:34.596,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,527153}, tap_estimate, {replica_building,"default",511,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.24991.0>, <<"replication_building_511_'ns_1@10.242.238.89'">>}
[ns_server:debug,2014-08-19T16:49:34.596,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1022)
[views:debug,2014-08-19T16:49:34.615,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/170. Updated state: active (0)
[ns_server:debug,2014-08-19T16:49:34.615,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",170,active,0}
[ns_server:debug,2014-08-19T16:49:34.644,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,635151}, tap_estimate, {replica_building,"default",1022,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18867.0>, <<"replication_building_1022_'ns_1@10.242.238.90'">>}
[ns_server:debug,2014-08-19T16:49:34.662,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,653965}, tap_estimate, {replica_building,"default",1022,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18818.0>, <<"replication_building_1022_'ns_1@10.242.238.91'">>}
[ns_server:debug,2014-08-19T16:49:34.663,ns_1@10.242.238.88:<0.25860.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.18818.0>}, {'ns_1@10.242.238.90',<18125.18867.0>}])
[rebalance:info,2014-08-19T16:49:34.663,ns_1@10.242.238.88:<0.25851.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91
[rebalance:info,2014-08-19T16:49:34.664,ns_1@10.242.238.88:<0.25851.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1022 on ns_1@10.242.238.88
[rebalance:info,2014-08-19T16:49:34.664,ns_1@10.242.238.88:<0.25851.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas
[ns_server:debug,2014-08-19T16:49:34.665,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}
[ns_server:debug,2014-08-19T16:49:34.670,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}]
[rebalance:debug,2014-08-19T16:49:34.670,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.25872.0>)
[ns_server:debug,2014-08-19T16:49:34.670,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 766)
[ns_server:debug,2014-08-19T16:49:34.670,ns_1@10.242.238.88:<0.25873.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack}
[ns_server:debug,2014-08-19T16:49:34.670,ns_1@10.242.238.88:<0.25873.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack}
[rebalance:info,2014-08-19T16:49:34.670,ns_1@10.242.238.88:<0.25872.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 766 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}]
[rebalance:info,2014-08-19T16:49:34.671,ns_1@10.242.238.88:<0.25878.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 766 state change: {'ns_1@10.242.238.91',replica,undefined, undefined}
[rebalance:info,2014-08-19T16:49:34.671,ns_1@10.242.238.88:<0.25879.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 766 state change: {'ns_1@10.242.238.90',replica,passive, undefined}
[ns_server:debug,2014-08-19T16:49:34.675,ns_1@10.242.238.88:<0.25880.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 766 into 'ns_1@10.242.238.91' is <18126.18838.0>
[ns_server:debug,2014-08-19T16:49:34.678,ns_1@10.242.238.88:<0.25880.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 766 into 'ns_1@10.242.238.90' is <18125.18886.0>
[rebalance:debug,2014-08-19T16:49:34.678,ns_1@10.242.238.88:<0.25872.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 766 is <0.25880.0>
[rebalance:info,2014-08-19T16:49:34.735,ns_1@10.242.238.88:<0.25816.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 511 state change: {'ns_1@10.242.238.88',active,paused,undefined}
[ns_server:info,2014-08-19T16:49:34.735,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 511 state to active
[rebalance:info,2014-08-19T16:49:34.736,ns_1@10.242.238.88:<0.25816.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 511 on ns_1@10.242.238.88
[rebalance:info,2014-08-19T16:49:34.737,ns_1@10.242.238.88:<0.25816.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas
[ns_server:debug,2014-08-19T16:49:34.756,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 168. Nacking mccouch update.
[views:debug,2014-08-19T16:49:34.757,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/168.
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:34.757,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",168,active,0} [ns_server:debug,2014-08-19T16:49:34.758,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,656, 592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864, 800,498,434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280, 706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186, 978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394,330, 266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538, 236,172,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508, 444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920, 792,426,298,660,532,894,400,272,1022,762,634,996,868,502,374,736,608,242,970, 842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530, 892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580, 214,942,814,448,320,682,554,188,916,788,422,294] [ns_server:debug,2014-08-19T16:49:34.768,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,759685}, tap_estimate, {replica_building,"default",766,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18838.0>, <<"replication_building_766_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:34.787,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,777972}, tap_estimate, {replica_building,"default",766,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18886.0>, <<"replication_building_766_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:34.787,ns_1@10.242.238.88:<0.25881.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.18886.0>}, {'ns_1@10.242.238.91',<18126.18838.0>}]) [rebalance:info,2014-08-19T16:49:34.787,ns_1@10.242.238.88:<0.25872.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:34.788,ns_1@10.242.238.88:<0.25872.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 766 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:34.788,ns_1@10.242.238.88:<0.25872.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:49:34.789,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}
[ns_server:debug,2014-08-19T16:49:34.794,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}]
[rebalance:debug,2014-08-19T16:49:34.794,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.25911.0>)
[ns_server:debug,2014-08-19T16:49:34.794,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 510)
[ns_server:debug,2014-08-19T16:49:34.795,ns_1@10.242.238.88:<0.25912.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack}
[ns_server:debug,2014-08-19T16:49:34.795,ns_1@10.242.238.88:<0.25912.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack}
[rebalance:info,2014-08-19T16:49:34.795,ns_1@10.242.238.88:<0.25911.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 510 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}]
[rebalance:info,2014-08-19T16:49:34.795,ns_1@10.242.238.88:<0.25917.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 510 state change: {'ns_1@10.242.238.91',replica,undefined, undefined}
[rebalance:info,2014-08-19T16:49:34.795,ns_1@10.242.238.88:<0.25918.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 510 state change: {'ns_1@10.242.238.89',replica,passive, undefined}
[ns_server:debug,2014-08-19T16:49:34.800,ns_1@10.242.238.88:<0.25919.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 510 into 'ns_1@10.242.238.91' is <18126.18846.0>
[ns_server:debug,2014-08-19T16:49:34.802,ns_1@10.242.238.88:<0.25919.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 510 into 'ns_1@10.242.238.89' is <18124.25014.0>
[rebalance:debug,2014-08-19T16:49:34.802,ns_1@10.242.238.88:<0.25911.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 510 is <0.25919.0>
[views:debug,2014-08-19T16:49:34.818,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/168.
Updated state: active (0)
[ns_server:debug,2014-08-19T16:49:34.818,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",168,active,0}
[ns_server:info,2014-08-19T16:49:34.849,ns_1@10.242.238.88:ns_doctor<0.20988.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.89': ["default"]
[ns_server:debug,2014-08-19T16:49:34.892,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,883642}, tap_estimate, {replica_building,"default",510,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18846.0>, <<"replication_building_510_'ns_1@10.242.238.91'">>}
[ns_server:debug,2014-08-19T16:49:34.910,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452574,900998}, tap_estimate, {replica_building,"default",510,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25014.0>, <<"replication_building_510_'ns_1@10.242.238.89'">>}
[ns_server:debug,2014-08-19T16:49:34.910,ns_1@10.242.238.88:<0.25920.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25014.0>}, {'ns_1@10.242.238.91',<18126.18846.0>}])
[rebalance:info,2014-08-19T16:49:34.910,ns_1@10.242.238.88:<0.25911.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89
[rebalance:info,2014-08-19T16:49:34.911,ns_1@10.242.238.88:<0.25911.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 510 on ns_1@10.242.238.88
[rebalance:info,2014-08-19T16:49:34.911,ns_1@10.242.238.88:<0.25911.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas
[ns_server:debug,2014-08-19T16:49:34.912,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}
[ns_server:debug,2014-08-19T16:49:34.916,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}]
[rebalance:debug,2014-08-19T16:49:34.917,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.25946.0>)
[ns_server:debug,2014-08-19T16:49:34.917,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1021)
[ns_server:debug,2014-08-19T16:49:34.917,ns_1@10.242.238.88:<0.25947.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack}
[ns_server:debug,2014-08-19T16:49:34.917,ns_1@10.242.238.88:<0.25947.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction.
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:34.917,ns_1@10.242.238.88:<0.25946.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1021 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:34.918,ns_1@10.242.238.88:<0.25952.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1021 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:34.918,ns_1@10.242.238.88:<0.25953.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1021 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:34.918,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 166. Nacking mccouch update. [views:debug,2014-08-19T16:49:34.918,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/166. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:34.918,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",166,active,0} [ns_server:debug,2014-08-19T16:49:34.919,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,656, 592,528,226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864, 800,498,434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280, 706,642,578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186, 978,914,850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394,330, 266,1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538, 236,172,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508, 444,380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716, 652,588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920, 792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242, 970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658, 530,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708, 580,214,942,814,448,320,682,554,188,916,788,422,294] [ns_server:debug,2014-08-19T16:49:34.922,ns_1@10.242.238.88:<0.25954.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1021 into 'ns_1@10.242.238.90' is <18125.18906.0> [ns_server:debug,2014-08-19T16:49:34.924,ns_1@10.242.238.88:<0.25954.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1021 into 
'ns_1@10.242.238.91' is <18126.18865.0>
[rebalance:debug,2014-08-19T16:49:34.924,ns_1@10.242.238.88:<0.25946.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1021 is <0.25954.0>
[views:debug,2014-08-19T16:49:34.969,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/166. Updated state: active (0)
[ns_server:debug,2014-08-19T16:49:34.969,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",166,active,0}
[ns_server:debug,2014-08-19T16:49:35.015,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,6158}, tap_estimate, {replica_building,"default",1021,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18906.0>, <<"replication_building_1021_'ns_1@10.242.238.90'">>}
[ns_server:debug,2014-08-19T16:49:35.033,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,24968}, tap_estimate, {replica_building,"default",1021,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18865.0>, <<"replication_building_1021_'ns_1@10.242.238.91'">>}
[ns_server:debug,2014-08-19T16:49:35.034,ns_1@10.242.238.88:<0.25955.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.18865.0>}, {'ns_1@10.242.238.90',<18125.18906.0>}])
[rebalance:info,2014-08-19T16:49:35.034,ns_1@10.242.238.88:<0.25946.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91
[rebalance:info,2014-08-19T16:49:35.035,ns_1@10.242.238.88:<0.25946.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1021 on ns_1@10.242.238.88
[rebalance:info,2014-08-19T16:49:35.035,ns_1@10.242.238.88:<0.25946.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas
[ns_server:debug,2014-08-19T16:49:35.036,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}
[ns_server:debug,2014-08-19T16:49:35.041,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}]
[rebalance:debug,2014-08-19T16:49:35.041,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.25981.0>)
[ns_server:debug,2014-08-19T16:49:35.041,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 765)
[ns_server:debug,2014-08-19T16:49:35.041,ns_1@10.242.238.88:<0.25982.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack}
[ns_server:debug,2014-08-19T16:49:35.042,ns_1@10.242.238.88:<0.25982.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction.
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:35.042,ns_1@10.242.238.88:<0.25981.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 765 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.042,ns_1@10.242.238.88:<0.25987.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 765 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.042,ns_1@10.242.238.88:<0.25988.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 765 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.046,ns_1@10.242.238.88:<0.25989.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 765 into 'ns_1@10.242.238.91' is <18126.18885.0> [ns_server:debug,2014-08-19T16:49:35.048,ns_1@10.242.238.88:<0.25989.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 765 into 'ns_1@10.242.238.90' is <18125.18911.0> [rebalance:debug,2014-08-19T16:49:35.048,ns_1@10.242.238.88:<0.25981.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 765 is <0.25989.0> [ns_server:debug,2014-08-19T16:49:35.062,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 164. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.062,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/164. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.062,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",164,active,0} [ns_server:debug,2014-08-19T16:49:35.063,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,592, 226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864,800,498, 434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642, 578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914, 850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394,330,266,1016, 756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172, 964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444,380, 316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588, 524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648, 584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426, 298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842, 
476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,164, 892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580, 214,942,814,448,320,682,554,188,916,788,422,294,656,528] [views:debug,2014-08-19T16:49:35.137,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/164. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.137,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",164,active,0} [ns_server:debug,2014-08-19T16:49:35.138,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,129849}, tap_estimate, {replica_building,"default",765,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18885.0>, <<"replication_building_765_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:35.156,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,147199}, tap_estimate, {replica_building,"default",765,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18911.0>, <<"replication_building_765_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:35.156,ns_1@10.242.238.88:<0.25990.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.18911.0>}, {'ns_1@10.242.238.91',<18126.18885.0>}]) [rebalance:info,2014-08-19T16:49:35.156,ns_1@10.242.238.88:<0.25981.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:35.157,ns_1@10.242.238.88:<0.25981.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 765 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.157,ns_1@10.242.238.88:<0.25981.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.158,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:35.163,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:35.163,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26007.0>) [ns_server:debug,2014-08-19T16:49:35.165,ns_1@10.242.238.88:<0.26008.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:35.165,ns_1@10.242.238.88:<0.26008.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:35.165,ns_1@10.242.238.88:<0.26007.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 509 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.166,ns_1@10.242.238.88:<0.26013.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 509 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.166,ns_1@10.242.238.88:<0.26014.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 509 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.170,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 509) [ns_server:debug,2014-08-19T16:49:35.171,ns_1@10.242.238.88:<0.26015.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 509 into 'ns_1@10.242.238.91' is <18126.18906.0> [ns_server:debug,2014-08-19T16:49:35.174,ns_1@10.242.238.88:<0.26015.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 509 into 'ns_1@10.242.238.89' is <18124.25044.0> [rebalance:debug,2014-08-19T16:49:35.174,ns_1@10.242.238.88:<0.26007.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 509 is <0.26015.0> [ns_server:debug,2014-08-19T16:49:35.264,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,255298}, tap_estimate, {replica_building,"default",509,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18906.0>, <<"replication_building_509_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:35.279,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 162. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.279,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/162. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.279,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",162,active,0} [ns_server:debug,2014-08-19T16:49:35.280,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,592, 226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864,800,498, 434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642, 578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914, 850,786,484,420,356,292,718,654,590,526,224,952,888,824,458,394,330,266,1016, 756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172, 964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444,380, 316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588, 524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974, 910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648, 584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426, 298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842, 476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,164, 892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580, 214,942,814,448,320,682,554,188,916,788,422,294,656,528,162] [ns_server:debug,2014-08-19T16:49:35.281,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,272684}, tap_estimate, {replica_building,"default",509,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25044.0>, <<"replication_building_509_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:35.282,ns_1@10.242.238.88:<0.26016.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25044.0>}, {'ns_1@10.242.238.91',<18126.18906.0>}]) [rebalance:info,2014-08-19T16:49:35.282,ns_1@10.242.238.88:<0.26007.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:35.283,ns_1@10.242.238.88:<0.26007.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 509 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.283,ns_1@10.242.238.88:<0.26007.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.284,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:35.288,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:35.289,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26042.0>) [ns_server:debug,2014-08-19T16:49:35.289,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1020) [ns_server:debug,2014-08-19T16:49:35.289,ns_1@10.242.238.88:<0.26043.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:35.289,ns_1@10.242.238.88:<0.26043.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:35.289,ns_1@10.242.238.88:<0.26042.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1020 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.289,ns_1@10.242.238.88:<0.26048.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1020 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.290,ns_1@10.242.238.88:<0.26049.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1020 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.293,ns_1@10.242.238.88:<0.26050.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1020 into 'ns_1@10.242.238.90' is <18125.18931.0> [ns_server:debug,2014-08-19T16:49:35.296,ns_1@10.242.238.88:<0.26050.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1020 into 'ns_1@10.242.238.91' is <18126.18925.0> [rebalance:debug,2014-08-19T16:49:35.296,ns_1@10.242.238.88:<0.26042.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1020 is <0.26050.0> [views:debug,2014-08-19T16:49:35.313,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/162. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.313,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",162,active,0} [ns_server:debug,2014-08-19T16:49:35.386,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,377322}, tap_estimate, {replica_building,"default",1020,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18931.0>, <<"replication_building_1020_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:35.406,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,397045}, tap_estimate, {replica_building,"default",1020,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18925.0>, <<"replication_building_1020_'ns_1@10.242.238.91'">>} [rebalance:info,2014-08-19T16:49:35.406,ns_1@10.242.238.88:<0.25872.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 766 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:35.406,ns_1@10.242.238.88:<0.25795.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 767 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:49:35.406,ns_1@10.242.238.88:<0.26051.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.18925.0>}, {'ns_1@10.242.238.90',<18125.18931.0>}]) [rebalance:info,2014-08-19T16:49:35.406,ns_1@10.242.238.88:<0.26042.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [ns_server:info,2014-08-19T16:49:35.406,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 766 state to active [rebalance:info,2014-08-19T16:49:35.407,ns_1@10.242.238.88:<0.26042.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1020 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.408,ns_1@10.242.238.88:<0.25872.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 766 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:35.408,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 767 state to active [rebalance:info,2014-08-19T16:49:35.409,ns_1@10.242.238.88:<0.25795.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 767 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.409,ns_1@10.242.238.88:<0.26042.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:35.410,ns_1@10.242.238.88:<0.25872.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.410,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [rebalance:info,2014-08-19T16:49:35.411,ns_1@10.242.238.88:<0.25795.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.415,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:49:35.415,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26085.0>) [ns_server:debug,2014-08-19T16:49:35.415,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 764) [ns_server:debug,2014-08-19T16:49:35.416,ns_1@10.242.238.88:<0.26086.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:35.416,ns_1@10.242.238.88:<0.26086.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:35.416,ns_1@10.242.238.88:<0.26085.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 764 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.416,ns_1@10.242.238.88:<0.26091.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 764 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.416,ns_1@10.242.238.88:<0.26092.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 764 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.422,ns_1@10.242.238.88:<0.26093.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 764 into 'ns_1@10.242.238.91' is <18126.18951.0> [ns_server:debug,2014-08-19T16:49:35.425,ns_1@10.242.238.88:<0.26093.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 764 into 'ns_1@10.242.238.90' is <18125.18956.0> [rebalance:debug,2014-08-19T16:49:35.425,ns_1@10.242.238.88:<0.26085.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 764 is <0.26093.0> [ns_server:debug,2014-08-19T16:49:35.455,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 160. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.455,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/160. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.455,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",160,active,0} [ns_server:debug,2014-08-19T16:49:35.456,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,592, 226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864,800,498, 434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642, 578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914, 850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266, 1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236, 172,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444, 380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652, 588,524,222,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860, 796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390,326, 262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712, 648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792, 426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242,970, 842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530, 164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708, 580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162] [views:debug,2014-08-19T16:49:35.514,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/160. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.514,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",160,active,0} [ns_server:debug,2014-08-19T16:49:35.515,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,506183}, tap_estimate, {replica_building,"default",764,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18951.0>, <<"replication_building_764_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:35.533,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,524209}, tap_estimate, {replica_building,"default",764,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18956.0>, <<"replication_building_764_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:35.533,ns_1@10.242.238.88:<0.26094.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.18956.0>}, {'ns_1@10.242.238.91',<18126.18951.0>}]) [rebalance:info,2014-08-19T16:49:35.533,ns_1@10.242.238.88:<0.26085.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:35.534,ns_1@10.242.238.88:<0.26085.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 764 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.534,ns_1@10.242.238.88:<0.26085.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.535,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:35.540,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:35.540,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26107.0>) [ns_server:debug,2014-08-19T16:49:35.541,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 508) [ns_server:debug,2014-08-19T16:49:35.541,ns_1@10.242.238.88:<0.26108.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:35.541,ns_1@10.242.238.88:<0.26108.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:35.541,ns_1@10.242.238.88:<0.26107.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 508 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.541,ns_1@10.242.238.88:<0.26113.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 508 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.542,ns_1@10.242.238.88:<0.26114.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 508 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.545,ns_1@10.242.238.88:<0.26115.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 508 into 'ns_1@10.242.238.91' is <18126.18970.0> [ns_server:debug,2014-08-19T16:49:35.547,ns_1@10.242.238.88:<0.26115.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 508 into 'ns_1@10.242.238.89' is <18124.25064.0> [rebalance:debug,2014-08-19T16:49:35.547,ns_1@10.242.238.88:<0.26107.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 508 is <0.26115.0> [ns_server:debug,2014-08-19T16:49:35.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 158. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.624,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/158. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",158,active,0} [ns_server:debug,2014-08-19T16:49:35.625,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,592, 226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864,800,498, 434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642, 578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914, 850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266, 1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236, 172,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444, 380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652, 588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,974,910,846,782,480,416,352,288,714,650,586,522,220,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192,920, 792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242, 
970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658, 530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346, 708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162] [ns_server:debug,2014-08-19T16:49:35.637,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,628696}, tap_estimate, {replica_building,"default",508,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18970.0>, <<"replication_building_508_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:35.655,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,646504}, tap_estimate, {replica_building,"default",508,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25064.0>, <<"replication_building_508_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:35.656,ns_1@10.242.238.88:<0.26116.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25064.0>}, {'ns_1@10.242.238.91',<18126.18970.0>}]) [rebalance:info,2014-08-19T16:49:35.656,ns_1@10.242.238.88:<0.26107.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:35.656,ns_1@10.242.238.88:<0.26107.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 508 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.657,ns_1@10.242.238.88:<0.26107.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.657,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:35.662,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:35.662,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26142.0>) [ns_server:debug,2014-08-19T16:49:35.662,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1019) [ns_server:debug,2014-08-19T16:49:35.663,ns_1@10.242.238.88:<0.26143.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:35.663,ns_1@10.242.238.88:<0.26143.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:35.663,ns_1@10.242.238.88:<0.26142.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1019 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.663,ns_1@10.242.238.88:<0.26148.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1019 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.664,ns_1@10.242.238.88:<0.26149.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1019 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.668,ns_1@10.242.238.88:<0.26150.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1019 into 'ns_1@10.242.238.90' is <18125.18982.0> [ns_server:debug,2014-08-19T16:49:35.670,ns_1@10.242.238.88:<0.26150.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1019 into 'ns_1@10.242.238.91' is <18126.18976.0> [rebalance:debug,2014-08-19T16:49:35.670,ns_1@10.242.238.88:<0.26142.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1019 is <0.26150.0> [views:debug,2014-08-19T16:49:35.691,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/158. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",158,active,0} [ns_server:debug,2014-08-19T16:49:35.760,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,751966}, tap_estimate, {replica_building,"default",1019,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.18982.0>, <<"replication_building_1019_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:35.778,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,769769}, tap_estimate, {replica_building,"default",1019,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18976.0>, <<"replication_building_1019_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:35.779,ns_1@10.242.238.88:<0.26151.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.18976.0>}, {'ns_1@10.242.238.90',<18125.18982.0>}]) [rebalance:info,2014-08-19T16:49:35.779,ns_1@10.242.238.88:<0.26142.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:35.780,ns_1@10.242.238.88:<0.26142.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1019 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.780,ns_1@10.242.238.88:<0.26142.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.781,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:35.786,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:49:35.786,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26177.0>) [ns_server:debug,2014-08-19T16:49:35.786,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 763) [ns_server:debug,2014-08-19T16:49:35.786,ns_1@10.242.238.88:<0.26178.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:35.787,ns_1@10.242.238.88:<0.26178.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:35.787,ns_1@10.242.238.88:<0.26177.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 763 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.787,ns_1@10.242.238.88:<0.26183.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 763 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.787,ns_1@10.242.238.88:<0.26184.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 763 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.790,ns_1@10.242.238.88:<0.26185.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 763 into 'ns_1@10.242.238.91' is <18126.18996.0> [ns_server:debug,2014-08-19T16:49:35.793,ns_1@10.242.238.88:<0.26185.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 763 into 'ns_1@10.242.238.90' is <18125.19001.0> [rebalance:debug,2014-08-19T16:49:35.793,ns_1@10.242.238.88:<0.26177.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 763 is <0.26185.0> [ns_server:debug,2014-08-19T16:49:35.841,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 156. Nacking mccouch update. [views:debug,2014-08-19T16:49:35.841,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/156. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.842,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",156,active,0} [ns_server:debug,2014-08-19T16:49:35.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,592, 226,954,890,826,460,396,332,268,1018,758,694,630,566,200,992,928,864,800,498, 434,370,306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642, 578,514,212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914, 850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266, 1016,756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236, 172,964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444, 380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652, 588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 286,712,648,584,520,218,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608, 242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296, 658,530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474, 346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162] [ns_server:debug,2014-08-19T16:49:35.883,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,874630}, tap_estimate, {replica_building,"default",763,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.18996.0>, <<"replication_building_763_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:35.902,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452575,893779}, tap_estimate, {replica_building,"default",763,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19001.0>, <<"replication_building_763_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:35.903,ns_1@10.242.238.88:<0.26186.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19001.0>}, {'ns_1@10.242.238.91',<18126.18996.0>}]) [rebalance:info,2014-08-19T16:49:35.903,ns_1@10.242.238.88:<0.26177.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:35.904,ns_1@10.242.238.88:<0.26177.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 763 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:35.904,ns_1@10.242.238.88:<0.26177.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for 
checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:35.905,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:35.910,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:35.910,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26198.0>) [ns_server:debug,2014-08-19T16:49:35.910,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 507) [ns_server:debug,2014-08-19T16:49:35.910,ns_1@10.242.238.88:<0.26199.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:35.911,ns_1@10.242.238.88:<0.26199.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:35.911,ns_1@10.242.238.88:<0.26198.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 507 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:35.911,ns_1@10.242.238.88:<0.26204.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 507 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:35.911,ns_1@10.242.238.88:<0.26205.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 507 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:35.916,ns_1@10.242.238.88:<0.26206.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 507 into 'ns_1@10.242.238.91' is <18126.19016.0> [views:debug,2014-08-19T16:49:35.917,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/156. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:35.917,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",156,active,0} [ns_server:debug,2014-08-19T16:49:35.919,ns_1@10.242.238.88:<0.26206.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 507 into 'ns_1@10.242.238.89' is <18124.25090.0> [rebalance:debug,2014-08-19T16:49:35.919,ns_1@10.242.238.88:<0.26198.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 507 is <0.26206.0> [ns_server:debug,2014-08-19T16:49:36.009,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,191}, tap_estimate, {replica_building,"default",507,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19016.0>, <<"replication_building_507_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.027,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,18363}, tap_estimate, {replica_building,"default",507,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25090.0>, <<"replication_building_507_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:36.027,ns_1@10.242.238.88:<0.26207.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25090.0>}, {'ns_1@10.242.238.91',<18126.19016.0>}]) [rebalance:info,2014-08-19T16:49:36.027,ns_1@10.242.238.88:<0.26198.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:36.028,ns_1@10.242.238.88:<0.26198.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 507 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.029,ns_1@10.242.238.88:<0.26198.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.029,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:36.034,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:36.034,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26233.0>) [ns_server:debug,2014-08-19T16:49:36.034,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1018) [ns_server:debug,2014-08-19T16:49:36.034,ns_1@10.242.238.88:<0.26234.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.034,ns_1@10.242.238.88:<0.26234.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:36.035,ns_1@10.242.238.88:<0.26233.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1018 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.035,ns_1@10.242.238.88:<0.26239.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1018 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.035,ns_1@10.242.238.88:<0.26240.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1018 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:36.038,ns_1@10.242.238.88:<0.26241.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1018 into 'ns_1@10.242.238.90' is <18125.19035.0> [ns_server:debug,2014-08-19T16:49:36.041,ns_1@10.242.238.88:<0.26241.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1018 into 'ns_1@10.242.238.91' is <18126.19021.0> [rebalance:debug,2014-08-19T16:49:36.041,ns_1@10.242.238.88:<0.26233.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1018 is <0.26241.0> [ns_server:debug,2014-08-19T16:49:36.075,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 154. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.075,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/154. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.076,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",154,active,0} [ns_server:debug,2014-08-19T16:49:36.077,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,880,386,258,1008,748,620,254,982,854, 488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542,176, 904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720,592, 226,954,826,460,332,758,694,630,566,200,992,928,864,800,498,434,370,306,732, 668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,940, 876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484,420, 356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628, 564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836, 772,470,406,342,278,704,640,576,512,210,938,874,810,508,444,380,316,1002,742, 678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222,158, 950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796,494,430, 366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638, 574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974,910,846, 782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012, 752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232,168, 960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504,440, 376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648,584, 520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792,426, 298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242,970,842, 
476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530,164, 892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708,580, 214,942,814,448,320,682,554,188,916,788,422,294,656,528,162,890,396,268,1018] [views:debug,2014-08-19T16:49:36.109,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/154. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",154,active,0} [ns_server:debug,2014-08-19T16:49:36.132,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,123095}, tap_estimate, {replica_building,"default",1018,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19035.0>, <<"replication_building_1018_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:36.149,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,140236}, tap_estimate, {replica_building,"default",1018,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19021.0>, <<"replication_building_1018_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.149,ns_1@10.242.238.88:<0.26242.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19021.0>}, {'ns_1@10.242.238.90',<18125.19035.0>}]) [rebalance:info,2014-08-19T16:49:36.149,ns_1@10.242.238.88:<0.26233.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:36.150,ns_1@10.242.238.88:<0.26233.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1018 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.151,ns_1@10.242.238.88:<0.26233.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.151,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:36.156,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:36.156,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26254.0>) [ns_server:debug,2014-08-19T16:49:36.156,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 762) [ns_server:debug,2014-08-19T16:49:36.156,ns_1@10.242.238.88:<0.26255.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.157,ns_1@10.242.238.88:<0.26255.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:36.157,ns_1@10.242.238.88:<0.26254.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 762 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.157,ns_1@10.242.238.88:<0.26260.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 762 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.157,ns_1@10.242.238.88:<0.26261.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 762 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:36.160,ns_1@10.242.238.88:<0.26262.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 762 into 'ns_1@10.242.238.91' is <18126.19041.0> [ns_server:debug,2014-08-19T16:49:36.163,ns_1@10.242.238.88:<0.26262.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 762 into 'ns_1@10.242.238.90' is <18125.19040.0> [rebalance:debug,2014-08-19T16:49:36.163,ns_1@10.242.238.88:<0.26254.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 762 is <0.26262.0> [ns_server:debug,2014-08-19T16:49:36.253,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,244195}, tap_estimate, {replica_building,"default",762,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19041.0>, <<"replication_building_762_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.273,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,264239}, tap_estimate, {replica_building,"default",762,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19040.0>, <<"replication_building_762_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:36.273,ns_1@10.242.238.88:<0.26263.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19040.0>}, {'ns_1@10.242.238.91',<18126.19041.0>}]) [rebalance:info,2014-08-19T16:49:36.274,ns_1@10.242.238.88:<0.26254.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:36.274,ns_1@10.242.238.88:<0.26254.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 762 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.275,ns_1@10.242.238.88:<0.26254.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.275,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:36.281,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:36.281,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26289.0>) 
[ns_server:debug,2014-08-19T16:49:36.281,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 506) [ns_server:debug,2014-08-19T16:49:36.282,ns_1@10.242.238.88:<0.26290.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.282,ns_1@10.242.238.88:<0.26290.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:36.282,ns_1@10.242.238.88:<0.26289.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 506 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.282,ns_1@10.242.238.88:<0.26295.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 506 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.282,ns_1@10.242.238.88:<0.26296.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 506 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:36.287,ns_1@10.242.238.88:<0.26297.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 506 into 'ns_1@10.242.238.91' is <18126.19046.0> [ns_server:debug,2014-08-19T16:49:36.289,ns_1@10.242.238.88:<0.26297.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 506 into 'ns_1@10.242.238.89' is <18124.25110.0> [rebalance:debug,2014-08-19T16:49:36.289,ns_1@10.242.238.88:<0.26289.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 506 is <0.26297.0> [ns_server:debug,2014-08-19T16:49:36.293,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 152. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.293,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/152. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.293,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",152,active,0} [ns_server:debug,2014-08-19T16:49:36.294,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,878,384,256,1006,746,618,252,980,852,486,358,720, 592,226,954,826,460,332,758,694,630,566,200,992,928,864,800,498,434,370,306, 732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212, 940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484, 420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692, 628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900, 836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444,380,316,1002, 742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796,494, 430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702, 638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974,910, 846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806,504, 440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792, 426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242,970, 842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658,530, 164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346,708, 580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162,890,396,268, 1018] [views:debug,2014-08-19T16:49:36.377,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/152. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.377,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",152,active,0} [ns_server:debug,2014-08-19T16:49:36.379,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,370401}, tap_estimate, {replica_building,"default",506,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19046.0>, <<"replication_building_506_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.397,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,388407}, tap_estimate, {replica_building,"default",506,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25110.0>, <<"replication_building_506_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:36.397,ns_1@10.242.238.88:<0.26298.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25110.0>}, {'ns_1@10.242.238.91',<18126.19046.0>}]) [rebalance:info,2014-08-19T16:49:36.398,ns_1@10.242.238.88:<0.26289.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:36.398,ns_1@10.242.238.88:<0.26289.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 506 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.399,ns_1@10.242.238.88:<0.26289.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.399,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:36.404,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:36.404,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26318.0>) [ns_server:debug,2014-08-19T16:49:36.404,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1017) [ns_server:debug,2014-08-19T16:49:36.405,ns_1@10.242.238.88:<0.26319.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.405,ns_1@10.242.238.88:<0.26319.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:36.405,ns_1@10.242.238.88:<0.26318.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1017 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.405,ns_1@10.242.238.88:<0.26324.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1017 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.405,ns_1@10.242.238.88:<0.26325.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1017 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:36.409,ns_1@10.242.238.88:<0.26326.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1017 into 'ns_1@10.242.238.90' is <18125.19060.0> [ns_server:debug,2014-08-19T16:49:36.411,ns_1@10.242.238.88:<0.26326.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1017 into 'ns_1@10.242.238.91' is <18126.19065.0> [rebalance:debug,2014-08-19T16:49:36.411,ns_1@10.242.238.88:<0.26318.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1017 is <0.26326.0> [ns_server:debug,2014-08-19T16:49:36.501,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,492447}, tap_estimate, {replica_building,"default",1017,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19060.0>, <<"replication_building_1017_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:36.521,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,512223}, tap_estimate, {replica_building,"default",1017,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19065.0>, <<"replication_building_1017_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.521,ns_1@10.242.238.88:<0.26327.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19065.0>}, {'ns_1@10.242.238.90',<18125.19060.0>}]) [rebalance:info,2014-08-19T16:49:36.521,ns_1@10.242.238.88:<0.26318.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:36.522,ns_1@10.242.238.88:<0.26318.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1017 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.523,ns_1@10.242.238.88:<0.26318.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.523,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:36.528,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:36.528,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26355.0>) 
[ns_server:debug,2014-08-19T16:49:36.528,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 761) [ns_server:debug,2014-08-19T16:49:36.529,ns_1@10.242.238.88:<0.26356.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.529,ns_1@10.242.238.88:<0.26356.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:36.529,ns_1@10.242.238.88:<0.26355.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 761 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.529,ns_1@10.242.238.88:<0.26361.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 761 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.529,ns_1@10.242.238.88:<0.26362.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 761 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:info,2014-08-19T16:49:36.535,ns_1@10.242.238.88:ns_doctor<0.20988.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.91': ["default"] [ns_server:debug,2014-08-19T16:49:36.535,ns_1@10.242.238.88:<0.26363.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 761 into 'ns_1@10.242.238.91' is <18126.19099.0> [ns_server:debug,2014-08-19T16:49:36.537,ns_1@10.242.238.88:<0.26363.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 761 into 'ns_1@10.242.238.90' is <18125.19066.0> [rebalance:debug,2014-08-19T16:49:36.537,ns_1@10.242.238.88:<0.26355.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 761 is <0.26363.0> [ns_server:debug,2014-08-19T16:49:36.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 150. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.547,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/150. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",150,active,0} [ns_server:debug,2014-08-19T16:49:36.549,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,758,694,630,566,200,992,928,864,800,498,434,370, 306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514, 212,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786, 484,420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756, 692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964, 900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444,380,316, 1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920, 792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608,242, 970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658, 530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474,346, 708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162,890,396, 268,1018] [views:debug,2014-08-19T16:49:36.581,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/150. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.581,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",150,active,0} [ns_server:debug,2014-08-19T16:49:36.626,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,617473}, tap_estimate, {replica_building,"default",761,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19099.0>, <<"replication_building_761_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.644,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,635484}, tap_estimate, {replica_building,"default",761,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19066.0>, <<"replication_building_761_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:36.645,ns_1@10.242.238.88:<0.26364.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19066.0>}, {'ns_1@10.242.238.91',<18126.19099.0>}]) [rebalance:info,2014-08-19T16:49:36.645,ns_1@10.242.238.88:<0.26355.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:36.645,ns_1@10.242.238.88:<0.26355.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 761 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.646,ns_1@10.242.238.88:<0.26355.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.646,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:36.651,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:36.651,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26391.0>) [ns_server:debug,2014-08-19T16:49:36.652,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 505) [ns_server:debug,2014-08-19T16:49:36.652,ns_1@10.242.238.88:<0.26392.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.652,ns_1@10.242.238.88:<0.26392.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:36.652,ns_1@10.242.238.88:<0.26391.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 505 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.652,ns_1@10.242.238.88:<0.26397.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 505 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.652,ns_1@10.242.238.88:<0.26398.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 505 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:36.657,ns_1@10.242.238.88:<0.26399.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 505 into 'ns_1@10.242.238.91' is <18126.19120.0> [ns_server:debug,2014-08-19T16:49:36.659,ns_1@10.242.238.88:<0.26399.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 505 into 'ns_1@10.242.238.89' is <18124.25130.0> [rebalance:debug,2014-08-19T16:49:36.659,ns_1@10.242.238.88:<0.26391.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 505 is <0.26399.0> [ns_server:debug,2014-08-19T16:49:36.681,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 148. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.681,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/148. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.681,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",148,active,0} [ns_server:debug,2014-08-19T16:49:36.682,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,758,694,630,566,200,992,928,864,800,498,434,370, 306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514, 212,148,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850, 786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016, 756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172, 964,900,836,772,470,406,342,278,704,640,576,512,210,938,874,810,508,444,380, 316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588, 524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860, 796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276, 766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608, 
242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296, 658,530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474, 346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162,890, 396,268,1018] [views:debug,2014-08-19T16:49:36.740,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/148. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.740,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",148,active,0} [ns_server:debug,2014-08-19T16:49:36.749,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,740800}, tap_estimate, {replica_building,"default",505,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19120.0>, <<"replication_building_505_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.767,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,758911}, tap_estimate, {replica_building,"default",505,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25130.0>, <<"replication_building_505_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:36.768,ns_1@10.242.238.88:<0.26400.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25130.0>}, {'ns_1@10.242.238.91',<18126.19120.0>}]) [rebalance:info,2014-08-19T16:49:36.768,ns_1@10.242.238.88:<0.26391.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:36.769,ns_1@10.242.238.88:<0.26391.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 505 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.769,ns_1@10.242.238.88:<0.26391.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.770,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:36.775,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:36.775,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26412.0>) [ns_server:debug,2014-08-19T16:49:36.775,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1016) [ns_server:debug,2014-08-19T16:49:36.775,ns_1@10.242.238.88:<0.26413.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.775,ns_1@10.242.238.88:<0.26413.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:36.776,ns_1@10.242.238.88:<0.26412.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1016 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.776,ns_1@10.242.238.88:<0.26423.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1016 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.776,ns_1@10.242.238.88:<0.26424.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1016 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:36.779,ns_1@10.242.238.88:<0.26428.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1016 into 'ns_1@10.242.238.90' is <18125.19086.0> [ns_server:debug,2014-08-19T16:49:36.782,ns_1@10.242.238.88:<0.26428.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1016 into 'ns_1@10.242.238.91' is <18126.19139.0> [rebalance:debug,2014-08-19T16:49:36.782,ns_1@10.242.238.88:<0.26412.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1016 is <0.26428.0> [ns_server:debug,2014-08-19T16:49:36.840,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 146. Nacking mccouch update. [views:debug,2014-08-19T16:49:36.840,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/146. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.841,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",146,active,0} [ns_server:debug,2014-08-19T16:49:36.842,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,758,694,630,566,200,992,928,864,800,498,434,370, 306,732,668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514, 212,148,940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850, 786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016, 756,692,628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172, 964,900,836,772,470,406,342,278,704,640,576,512,210,146,938,874,810,508,444, 380,316,1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652, 588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924, 860,796,494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340, 276,766,702,638,574,208,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736, 
608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424, 296,658,530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840, 474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162, 890,396,268,1018] [ns_server:debug,2014-08-19T16:49:36.873,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,864354}, tap_estimate, {replica_building,"default",1016,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19086.0>, <<"replication_building_1016_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:36.890,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,881844}, tap_estimate, {replica_building,"default",1016,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19139.0>, <<"replication_building_1016_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:36.892,ns_1@10.242.238.88:<0.26435.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19139.0>}, {'ns_1@10.242.238.90',<18125.19086.0>}]) [rebalance:info,2014-08-19T16:49:36.893,ns_1@10.242.238.88:<0.26412.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:36.893,ns_1@10.242.238.88:<0.26412.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1016 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:36.894,ns_1@10.242.238.88:<0.26412.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:36.894,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:36.899,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:36.899,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26447.0>) [ns_server:debug,2014-08-19T16:49:36.899,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 760) [ns_server:debug,2014-08-19T16:49:36.900,ns_1@10.242.238.88:<0.26448.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:36.900,ns_1@10.242.238.88:<0.26448.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:36.900,ns_1@10.242.238.88:<0.26447.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 760 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:36.900,ns_1@10.242.238.88:<0.26453.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 760 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:36.900,ns_1@10.242.238.88:<0.26454.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 760 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:36.906,ns_1@10.242.238.88:<0.26455.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 760 into 'ns_1@10.242.238.91' is <18126.19161.0> [ns_server:debug,2014-08-19T16:49:36.908,ns_1@10.242.238.88:<0.26455.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 760 into 'ns_1@10.242.238.90' is <18125.19105.0> [rebalance:debug,2014-08-19T16:49:36.908,ns_1@10.242.238.88:<0.26447.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 760 is <0.26455.0> [views:debug,2014-08-19T16:49:36.924,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/146. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:36.924,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",146,active,0} [ns_server:debug,2014-08-19T16:49:36.998,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452576,989796}, tap_estimate, {replica_building,"default",760,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19161.0>, <<"replication_building_760_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.017,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,8010}, tap_estimate, {replica_building,"default",760,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19105.0>, <<"replication_building_760_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:37.017,ns_1@10.242.238.88:<0.26456.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19105.0>}, {'ns_1@10.242.238.91',<18126.19161.0>}]) [rebalance:info,2014-08-19T16:49:37.017,ns_1@10.242.238.88:<0.26447.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:37.018,ns_1@10.242.238.88:<0.26447.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 760 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.018,ns_1@10.242.238.88:<0.26447.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.019,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:37.024,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:49:37.024,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26482.0>) [ns_server:debug,2014-08-19T16:49:37.024,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 504) [ns_server:debug,2014-08-19T16:49:37.025,ns_1@10.242.238.88:<0.26483.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.025,ns_1@10.242.238.88:<0.26483.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:37.025,ns_1@10.242.238.88:<0.26482.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 504 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:37.025,ns_1@10.242.238.88:<0.26488.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 504 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.025,ns_1@10.242.238.88:<0.26489.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 504 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.029,ns_1@10.242.238.88:<0.26490.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 504 into 'ns_1@10.242.238.91' is <18126.19180.0> [ns_server:debug,2014-08-19T16:49:37.032,ns_1@10.242.238.88:<0.26490.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 504 into 'ns_1@10.242.238.89' is <18124.25151.0> [rebalance:debug,2014-08-19T16:49:37.032,ns_1@10.242.238.88:<0.26482.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 504 is <0.26490.0> [ns_server:debug,2014-08-19T16:49:37.099,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 144. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.100,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/144. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.101,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",144,active,0} [ns_server:debug,2014-08-19T16:49:37.102,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,992,928,864,800,498,434,370,306,732, 668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,148, 940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484, 420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692, 628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900, 836,772,470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316, 1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736,608, 242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296, 658,530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840,474, 346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162,890, 396,268,1018,758,630] [ns_server:debug,2014-08-19T16:49:37.122,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,113417}, tap_estimate, {replica_building,"default",504,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19180.0>, <<"replication_building_504_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.139,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,130928}, tap_estimate, {replica_building,"default",504,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25151.0>, <<"replication_building_504_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:37.140,ns_1@10.242.238.88:<0.26491.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25151.0>}, {'ns_1@10.242.238.91',<18126.19180.0>}]) [rebalance:info,2014-08-19T16:49:37.140,ns_1@10.242.238.88:<0.26482.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:37.141,ns_1@10.242.238.88:<0.26482.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 504 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:37.141,ns_1@10.242.238.88:<0.26482.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.142,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:37.147,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:37.147,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26503.0>) [ns_server:debug,2014-08-19T16:49:37.147,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1015) [ns_server:debug,2014-08-19T16:49:37.147,ns_1@10.242.238.88:<0.26504.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.148,ns_1@10.242.238.88:<0.26504.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:37.148,ns_1@10.242.238.88:<0.26503.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1015 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:37.148,ns_1@10.242.238.88:<0.26509.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1015 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.148,ns_1@10.242.238.88:<0.26510.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1015 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.152,ns_1@10.242.238.88:<0.26511.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1015 into 'ns_1@10.242.238.90' is <18125.19125.0> [ns_server:debug,2014-08-19T16:49:37.153,ns_1@10.242.238.88:<0.26511.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1015 into 'ns_1@10.242.238.91' is <18126.19185.0> [rebalance:debug,2014-08-19T16:49:37.153,ns_1@10.242.238.88:<0.26503.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1015 is <0.26511.0> [views:debug,2014-08-19T16:49:37.183,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/144. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.183,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",144,active,0} [ns_server:debug,2014-08-19T16:49:37.246,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,237093}, tap_estimate, {replica_building,"default",1015,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19125.0>, <<"replication_building_1015_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:37.265,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,256546}, tap_estimate, {replica_building,"default",1015,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19185.0>, <<"replication_building_1015_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.266,ns_1@10.242.238.88:<0.26512.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19185.0>}, {'ns_1@10.242.238.90',<18125.19125.0>}]) [rebalance:info,2014-08-19T16:49:37.266,ns_1@10.242.238.88:<0.26503.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:37.267,ns_1@10.242.238.88:<0.26503.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1015 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.267,ns_1@10.242.238.88:<0.26503.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.268,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:37.272,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:37.272,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26543.0>) [ns_server:debug,2014-08-19T16:49:37.273,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 759) [ns_server:debug,2014-08-19T16:49:37.273,ns_1@10.242.238.88:<0.26544.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.273,ns_1@10.242.238.88:<0.26544.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:37.273,ns_1@10.242.238.88:<0.26543.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 759 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:37.273,ns_1@10.242.238.88:<0.26549.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 759 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.273,ns_1@10.242.238.88:<0.26550.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 759 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.277,ns_1@10.242.238.88:<0.26551.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 759 into 'ns_1@10.242.238.91' is <18126.19205.0> [ns_server:debug,2014-08-19T16:49:37.280,ns_1@10.242.238.88:<0.26551.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 759 into 'ns_1@10.242.238.90' is <18125.19144.0> [rebalance:debug,2014-08-19T16:49:37.280,ns_1@10.242.238.88:<0.26543.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 759 is <0.26551.0> [ns_server:debug,2014-08-19T16:49:37.333,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 142. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.334,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/142. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.334,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",142,active,0} [ns_server:debug,2014-08-19T16:49:37.335,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,992,928,864,800,498,434,370,306,732, 668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,148, 940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484, 420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692, 628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900, 836,772,470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316, 1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,996,868,502,374,736, 
608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424, 296,658,530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968,840, 474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162, 890,396,268,1018,758,630] [rebalance:info,2014-08-19T16:49:37.341,ns_1@10.242.238.88:<0.25760.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1023 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:37.341,ns_1@10.242.238.88:<0.26482.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 504 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:37.341,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1023 state to active [rebalance:info,2014-08-19T16:49:37.343,ns_1@10.242.238.88:<0.25760.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1023 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:37.343,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 504 state to active [rebalance:info,2014-08-19T16:49:37.344,ns_1@10.242.238.88:<0.26482.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 504 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.344,ns_1@10.242.238.88:<0.25760.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:37.344,ns_1@10.242.238.88:<0.26482.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.370,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,361308}, tap_estimate, {replica_building,"default",759,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19205.0>, <<"replication_building_759_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.388,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,379381}, tap_estimate, {replica_building,"default",759,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19144.0>, <<"replication_building_759_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:37.388,ns_1@10.242.238.88:<0.26552.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19144.0>}, {'ns_1@10.242.238.91',<18126.19205.0>}]) [rebalance:info,2014-08-19T16:49:37.388,ns_1@10.242.238.88:<0.26543.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:37.389,ns_1@10.242.238.88:<0.26543.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 759 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.390,ns_1@10.242.238.88:<0.26543.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.390,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:37.395,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:49:37.395,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26572.0>) [ns_server:debug,2014-08-19T16:49:37.395,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 503) [ns_server:debug,2014-08-19T16:49:37.396,ns_1@10.242.238.88:<0.26573.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.396,ns_1@10.242.238.88:<0.26573.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:37.396,ns_1@10.242.238.88:<0.26572.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 503 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:37.396,ns_1@10.242.238.88:<0.26578.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 503 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.396,ns_1@10.242.238.88:<0.26579.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 503 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.400,ns_1@10.242.238.88:<0.26580.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 503 into 'ns_1@10.242.238.91' is <18126.19227.0> [ns_server:debug,2014-08-19T16:49:37.402,ns_1@10.242.238.88:<0.26580.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 503 into 'ns_1@10.242.238.89' is <18124.25174.0> [rebalance:debug,2014-08-19T16:49:37.403,ns_1@10.242.238.88:<0.26572.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 503 is <0.26580.0> [views:debug,2014-08-19T16:49:37.418,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/142. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",142,active,0} [ns_server:debug,2014-08-19T16:49:37.494,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,485149}, tap_estimate, {replica_building,"default",503,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19227.0>, <<"replication_building_503_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.511,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,502554}, tap_estimate, {replica_building,"default",503,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25174.0>, <<"replication_building_503_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:37.512,ns_1@10.242.238.88:<0.26581.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25174.0>}, {'ns_1@10.242.238.91',<18126.19227.0>}]) [rebalance:info,2014-08-19T16:49:37.512,ns_1@10.242.238.88:<0.26572.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:37.512,ns_1@10.242.238.88:<0.26572.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 503 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.513,ns_1@10.242.238.88:<0.26572.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.513,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:37.518,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:37.518,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26607.0>) [ns_server:debug,2014-08-19T16:49:37.518,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1014) [ns_server:debug,2014-08-19T16:49:37.519,ns_1@10.242.238.88:<0.26608.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.519,ns_1@10.242.238.88:<0.26608.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [ns_server:debug,2014-08-19T16:49:37.519,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 140. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.519,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/140. 
Updated state: active (0) [rebalance:info,2014-08-19T16:49:37.519,ns_1@10.242.238.88:<0.26607.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1014 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [ns_server:debug,2014-08-19T16:49:37.519,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",140,active,0} [rebalance:info,2014-08-19T16:49:37.519,ns_1@10.242.238.88:<0.26613.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1014 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.519,ns_1@10.242.238.88:<0.26614.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1014 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.520,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,992,928,864,800,498,434,370,306,732, 668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,148, 940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484, 420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692, 628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900, 836,772,470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316, 1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,994,866,500,372,734,606,240,968, 840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528, 162,890,396,268,1018,758,630] [ns_server:debug,2014-08-19T16:49:37.523,ns_1@10.242.238.88:<0.26615.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1014 into 'ns_1@10.242.238.90' is <18125.19187.0> [ns_server:debug,2014-08-19T16:49:37.526,ns_1@10.242.238.88:<0.26615.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1014 into 'ns_1@10.242.238.91' is <18126.19235.0> [rebalance:debug,2014-08-19T16:49:37.526,ns_1@10.242.238.88:<0.26607.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1014 is <0.26615.0> [views:debug,2014-08-19T16:49:37.595,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket 
event for default/140. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.595,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",140,active,0} [ns_server:debug,2014-08-19T16:49:37.616,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,607155}, tap_estimate, {replica_building,"default",1014,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19187.0>, <<"replication_building_1014_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:37.634,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,625227}, tap_estimate, {replica_building,"default",1014,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19235.0>, <<"replication_building_1014_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.634,ns_1@10.242.238.88:<0.26616.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19235.0>}, {'ns_1@10.242.238.90',<18125.19187.0>}]) [rebalance:info,2014-08-19T16:49:37.635,ns_1@10.242.238.88:<0.26607.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:37.635,ns_1@10.242.238.88:<0.26607.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1014 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.636,ns_1@10.242.238.88:<0.26607.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.636,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:37.641,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:37.641,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26642.0>) [ns_server:debug,2014-08-19T16:49:37.641,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 758) [ns_server:debug,2014-08-19T16:49:37.642,ns_1@10.242.238.88:<0.26643.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.642,ns_1@10.242.238.88:<0.26643.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:37.642,ns_1@10.242.238.88:<0.26642.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 758 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:37.642,ns_1@10.242.238.88:<0.26648.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 758 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.642,ns_1@10.242.238.88:<0.26649.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 758 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.647,ns_1@10.242.238.88:<0.26650.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 758 into 'ns_1@10.242.238.91' is <18126.19255.0> [ns_server:debug,2014-08-19T16:49:37.650,ns_1@10.242.238.88:<0.26650.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 758 into 'ns_1@10.242.238.90' is <18125.19206.0> [rebalance:debug,2014-08-19T16:49:37.650,ns_1@10.242.238.88:<0.26642.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 758 is <0.26650.0> [ns_server:debug,2014-08-19T16:49:37.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 138. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.661,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/138. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.662,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",138,active,0} [ns_server:debug,2014-08-19T16:49:37.663,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,992,928,864,800,498,434,370,306,732, 668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,148, 940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484, 420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692, 628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900, 836,772,470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316, 1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 
736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656, 528,162,890,396,268,1018,758,630] [views:debug,2014-08-19T16:49:37.696,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/138. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.696,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",138,active,0} [ns_server:debug,2014-08-19T16:49:37.742,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,733023}, tap_estimate, {replica_building,"default",758,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19255.0>, <<"replication_building_758_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.759,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,750649}, tap_estimate, {replica_building,"default",758,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19206.0>, <<"replication_building_758_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:37.760,ns_1@10.242.238.88:<0.26651.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19206.0>}, {'ns_1@10.242.238.91',<18126.19255.0>}]) [rebalance:info,2014-08-19T16:49:37.760,ns_1@10.242.238.88:<0.26642.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:37.761,ns_1@10.242.238.88:<0.26642.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 758 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.761,ns_1@10.242.238.88:<0.26642.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.762,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:37.767,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:37.767,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26677.0>) [ns_server:debug,2014-08-19T16:49:37.767,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 502) [ns_server:debug,2014-08-19T16:49:37.767,ns_1@10.242.238.88:<0.26678.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.768,ns_1@10.242.238.88:<0.26678.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:37.768,ns_1@10.242.238.88:<0.26677.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 502 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:37.768,ns_1@10.242.238.88:<0.26683.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 502 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.768,ns_1@10.242.238.88:<0.26684.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 502 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.771,ns_1@10.242.238.88:<0.26685.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 502 into 'ns_1@10.242.238.91' is <18126.19260.0> [ns_server:debug,2014-08-19T16:49:37.774,ns_1@10.242.238.88:<0.26685.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 502 into 'ns_1@10.242.238.89' is <18124.25194.0> [rebalance:debug,2014-08-19T16:49:37.774,ns_1@10.242.238.88:<0.26677.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 502 is <0.26685.0> [ns_server:debug,2014-08-19T16:49:37.796,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 136. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.796,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/136. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.796,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",136,active,0} [ns_server:debug,2014-08-19T16:49:37.797,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,992,928,864,800,498,434,370,306,732, 668,604,540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,148, 940,876,812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484, 420,356,292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692, 628,564,198,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900, 836,772,470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316, 1002,742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 
736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656, 528,162,890,396,268,1018,758,630,136] [views:debug,2014-08-19T16:49:37.830,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/136. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.830,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",136,active,0} [ns_server:debug,2014-08-19T16:49:37.866,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,856974}, tap_estimate, {replica_building,"default",502,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19260.0>, <<"replication_building_502_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:37.885,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,876142}, tap_estimate, {replica_building,"default",502,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25194.0>, <<"replication_building_502_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:37.885,ns_1@10.242.238.88:<0.26686.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25194.0>}, {'ns_1@10.242.238.91',<18126.19260.0>}]) [rebalance:info,2014-08-19T16:49:37.885,ns_1@10.242.238.88:<0.26677.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:37.886,ns_1@10.242.238.88:<0.26677.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 502 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:37.886,ns_1@10.242.238.88:<0.26677.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:37.887,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:37.891,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:37.891,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26712.0>) [ns_server:debug,2014-08-19T16:49:37.892,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1013) [ns_server:debug,2014-08-19T16:49:37.892,ns_1@10.242.238.88:<0.26713.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:37.892,ns_1@10.242.238.88:<0.26713.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:37.892,ns_1@10.242.238.88:<0.26712.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1013 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:37.892,ns_1@10.242.238.88:<0.26718.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1013 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:37.893,ns_1@10.242.238.88:<0.26719.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1013 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:37.896,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 134. Nacking mccouch update. [views:debug,2014-08-19T16:49:37.897,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/134. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.897,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",134,active,0} [ns_server:debug,2014-08-19T16:49:37.897,ns_1@10.242.238.88:<0.26720.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1013 into 'ns_1@10.242.238.90' is <18125.19227.0> [ns_server:debug,2014-08-19T16:49:37.898,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,928,800,434,306,732,668,604,540,238, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484,420,356,292,718, 654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134, 990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772,470, 406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678, 614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950, 886,822,456,392,328,264,1014,754,690,626,562,196,988,924,860,796,494,430,366, 302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574, 208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974,910,846, 782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012, 752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232,168, 960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806,504, 440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792, 426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736,608,242, 970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296,658, 530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240,968,840,474, 346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162,890, 396,268,1018,758,630,136,992,864,498,370] 
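
The "Usable vbuckets" dumps from capi_set_view_manager above grow by one entry each time a set_vbucket/active event is signaled for this node (vbuckets 136 and 134 in the surrounding entries). A minimal sketch of that bookkeeping, assuming a plain ordered set; the module and function names are illustrative only and are not ns_server code:

-module(usable_vbuckets_sketch).
-export([demo/0]).

%% Fold one mc_couch set_vbucket event into the usable-vbuckets set.
handle_set_vbucket(VBucket, active, Usable) ->
    ordsets:add_element(VBucket, Usable);
handle_set_vbucket(VBucket, dead, Usable) ->
    ordsets:del_element(VBucket, Usable);
handle_set_vbucket(_VBucket, _OtherState, Usable) ->
    Usable.

demo() ->
    %% Suppose 136, 138 and 140 were already usable, then default/134
    %% goes active, as in the log entry above.
    S0 = ordsets:from_list([136, 138, 140]),
    S1 = handle_set_vbucket(134, active, S0),
    ordsets:to_list(S1).    %% [134,136,138,140]
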
[ns_server:debug,2014-08-19T16:49:37.898,ns_1@10.242.238.88:<0.26720.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1013 into 'ns_1@10.242.238.91' is <18126.19279.0> [rebalance:debug,2014-08-19T16:49:37.899,ns_1@10.242.238.88:<0.26712.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1013 is <0.26720.0> [views:debug,2014-08-19T16:49:37.932,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/134. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:37.932,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",134,active,0} [ns_server:debug,2014-08-19T16:49:37.951,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_502_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_502_'ns_1@10.242.238.91'">>}]}, {move_state,758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_758_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_758_'ns_1@10.242.238.91'">>}]}, {move_state,1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1014_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1014_'ns_1@10.242.238.90'">>}]}, {move_state,503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_503_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_503_'ns_1@10.242.238.91'">>}]}, {move_state,759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_759_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_759_'ns_1@10.242.238.91'">>}]}, {move_state,1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1015_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1015_'ns_1@10.242.238.90'">>}]}, {move_state,504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_504_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_504_'ns_1@10.242.238.91'">>}]}, {move_state,760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_760_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_760_'ns_1@10.242.238.91'">>}]}, {move_state,1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1016_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1016_'ns_1@10.242.238.90'">>}]}, {move_state,505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_505_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_505_'ns_1@10.242.238.91'">>}]}, {move_state,761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_761_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_761_'ns_1@10.242.238.91'">>}]}, {move_state,1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1017_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1017_'ns_1@10.242.238.90'">>}]}, {move_state,506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_506_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_506_'ns_1@10.242.238.91'">>}]}, {move_state,762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_762_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_762_'ns_1@10.242.238.91'">>}]}, {move_state,1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1018_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1018_'ns_1@10.242.238.90'">>}]}, {move_state,507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_507_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_507_'ns_1@10.242.238.91'">>}]}, {move_state,763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_763_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_763_'ns_1@10.242.238.91'">>}]}, {move_state,1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1019_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1019_'ns_1@10.242.238.90'">>}]}, {move_state,508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_508_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_508_'ns_1@10.242.238.91'">>}]}, {move_state,764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_764_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_764_'ns_1@10.242.238.91'">>}]}, {move_state,1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1020_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1020_'ns_1@10.242.238.90'">>}]}, {move_state,509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_509_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_509_'ns_1@10.242.238.91'">>}]}, {move_state,765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_765_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_765_'ns_1@10.242.238.91'">>}]}, {move_state,1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1021_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1021_'ns_1@10.242.238.90'">>}]}, {move_state,510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_510_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_510_'ns_1@10.242.238.91'">>}]}, {move_state,766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_766_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_766_'ns_1@10.242.238.91'">>}]}, {move_state,1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1022_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1022_'ns_1@10.242.238.90'">>}]}, {move_state,511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_511_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_511_'ns_1@10.242.238.91'">>}]}, {move_state,767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_767_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_767_'ns_1@10.242.238.91'">>}]}, {move_state,1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1023_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1023_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:49:37.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 502, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
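
The docs_left_updater_loop state dumped above is a list of move_state terms, each carrying one replica_building_stats entry per destination replica. A rough sketch of those shapes and of deriving a docs-left total from them; the field names after the node (docs_total, docs_left) are guesses for illustration, since the log only shows two zeros and the tap name, and are not taken from ns_server source:

-module(docs_left_sketch).
-export([demo/0]).

-record(replica_building_stats,
        {node, docs_total = 0, docs_left = 0, tap_name = <<>>}).
-record(move_state, {vbucket, old_chain, new_chain, stats = []}).

%% Sum the remaining-docs figure across all replica builders of a move.
docs_left(#move_state{stats = Stats}) ->
    lists:sum([S#replica_building_stats.docs_left || S <- Stats]).

demo() ->
    Move = #move_state{
              vbucket   = 502,
              old_chain = ['ns_1@10.242.238.88', undefined],
              new_chain = ['ns_1@10.242.238.89', 'ns_1@10.242.238.91'],
              stats     = [#replica_building_stats{node = 'ns_1@10.242.238.89'},
                           #replica_building_stats{node = 'ns_1@10.242.238.91'}]},
    docs_left(Move).    %% 0, matching the zeros in the dump above
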
[ns_server:debug,2014-08-19T16:49:37.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 758, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1014, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 503, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 759, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1015, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 504, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 760, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1016, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 505, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 761, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1017, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 506, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 762, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1018, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 507, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 763, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1019, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 508, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 764, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:37.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1020, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 509, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 765, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1021, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 510, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 766, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1022, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 511, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 767, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:37.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1023, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:37.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452577,981516}, tap_estimate, {replica_building,"default",1013,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19227.0>, <<"replication_building_1013_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:38.010,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,1278}, tap_estimate, {replica_building,"default",1013,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19279.0>, <<"replication_building_1013_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.010,ns_1@10.242.238.88:<0.26721.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19279.0>}, {'ns_1@10.242.238.90',<18125.19227.0>}]) [rebalance:info,2014-08-19T16:49:38.011,ns_1@10.242.238.88:<0.26712.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:38.011,ns_1@10.242.238.88:<0.26712.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1013 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:38.012,ns_1@10.242.238.88:<0.26712.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.012,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} 
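
Each vbucket in this log goes through the same move-start sequence: a single vbucket mover is spawned, the future replicas are bulk-set to replica/passive, one replica-building ebucketmigrator is spawned per destination, backfill determination is awaited ("Had backfill rvs: [true,true]"), indexing is initiated on the first new-chain node, and the mover waits for persistence checkpoint 1 before backfill is noted done. A condensed sketch of that sequence, with hypothetical stub helpers standing in for the real janitor_agent and replica-builder calls:

-module(vbucket_move_sketch).
-export([move/4]).

%% Condensed per-vbucket move-start sequence as it appears in this log.
move(Bucket, VBucket, OldChain, NewChain) ->
    %% 1. Put the future replicas into replica/passive state
    %%    (janitor_agent:bulk_set_vbucket_state in the log).
    ok = bulk_set_vbucket_state(Bucket, VBucket, NewChain),
    %% 2. Spawn one replica-building ebucketmigrator per destination.
    Builders = [spawn_replica_builder(Bucket, VBucket, Dst) || Dst <- NewChain],
    %% 3. Wait until every builder reports that backfill has started.
    true = lists:all(fun backfill_started/1, Builders),
    %% 4. Kick off view indexing on the first new-chain node and wait
    %%    for persistence checkpoint 1 on the replicas.
    ok = initiate_indexing(Bucket, hd(NewChain)),
    ok = wait_for_checkpoint(Bucket, VBucket, 1),
    {backfill_done, {move, {VBucket, OldChain, NewChain}}}.

%% Stubs standing in for the real calls; they only model the shape.
bulk_set_vbucket_state(_Bucket, _VBucket, _Chain) -> ok.
spawn_replica_builder(_Bucket, _VBucket, Dst) -> {Dst, self()}.
backfill_started({_Dst, _Pid}) -> true.
initiate_indexing(_Bucket, _Node) -> ok.
wait_for_checkpoint(_Bucket, _VBucket, _CheckpointId) -> ok.
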
[ns_server:debug,2014-08-19T16:49:38.017,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:38.017,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26778.0>) [ns_server:debug,2014-08-19T16:49:38.017,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 757) [ns_server:debug,2014-08-19T16:49:38.017,ns_1@10.242.238.88:<0.26779.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.018,ns_1@10.242.238.88:<0.26779.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:38.018,ns_1@10.242.238.88:<0.26778.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 757 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.018,ns_1@10.242.238.88:<0.26784.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 757 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.018,ns_1@10.242.238.88:<0.26785.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 757 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.023,ns_1@10.242.238.88:<0.26786.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 757 into 'ns_1@10.242.238.91' is <18126.19299.0> [ns_server:debug,2014-08-19T16:49:38.025,ns_1@10.242.238.88:<0.26786.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 757 into 'ns_1@10.242.238.90' is <18125.19246.0> [rebalance:debug,2014-08-19T16:49:38.025,ns_1@10.242.238.88:<0.26778.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 757 is <0.26786.0> [ns_server:debug,2014-08-19T16:49:38.092,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 132. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.092,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/132. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.092,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",132,active,0} [ns_server:debug,2014-08-19T16:49:38.094,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,928,800,434,306,732,668,604,540,238, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484,420,356,292,718, 654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134, 990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772,470, 406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678, 614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950, 886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430, 366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638, 574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974,910, 846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,752,688,624,560,194,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920, 792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736,608, 242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424,296, 658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240,968,840, 474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528,162, 890,396,268,1018,758,630,136,992,864,498,370] [ns_server:debug,2014-08-19T16:49:38.116,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,107209}, tap_estimate, {replica_building,"default",757,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19299.0>, <<"replication_building_757_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.133,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,124833}, tap_estimate, {replica_building,"default",757,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19246.0>, <<"replication_building_757_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:38.134,ns_1@10.242.238.88:<0.26787.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19246.0>}, {'ns_1@10.242.238.91',<18126.19299.0>}]) [rebalance:info,2014-08-19T16:49:38.134,ns_1@10.242.238.88:<0.26778.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:38.135,ns_1@10.242.238.88:<0.26778.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 757 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:38.135,ns_1@10.242.238.88:<0.26778.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.136,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:38.141,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:38.141,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26799.0>) [ns_server:debug,2014-08-19T16:49:38.141,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 501) [ns_server:debug,2014-08-19T16:49:38.142,ns_1@10.242.238.88:<0.26800.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.142,ns_1@10.242.238.88:<0.26800.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:38.142,ns_1@10.242.238.88:<0.26799.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 501 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.142,ns_1@10.242.238.88:<0.26805.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 501 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.142,ns_1@10.242.238.88:<0.26806.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 501 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.147,ns_1@10.242.238.88:<0.26807.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 501 into 'ns_1@10.242.238.91' is <18126.19318.0> [ns_server:debug,2014-08-19T16:49:38.150,ns_1@10.242.238.88:<0.26807.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 501 into 'ns_1@10.242.238.89' is <18124.25220.0> [rebalance:debug,2014-08-19T16:49:38.150,ns_1@10.242.238.88:<0.26799.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 501 is <0.26807.0> [views:debug,2014-08-19T16:49:38.176,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/132. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.176,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",132,active,0} [ns_server:debug,2014-08-19T16:49:38.241,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,232666}, tap_estimate, {replica_building,"default",501,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19318.0>, <<"replication_building_501_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.258,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,249579}, tap_estimate, {replica_building,"default",501,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25220.0>, <<"replication_building_501_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:38.259,ns_1@10.242.238.88:<0.26808.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25220.0>}, {'ns_1@10.242.238.91',<18126.19318.0>}]) [rebalance:info,2014-08-19T16:49:38.259,ns_1@10.242.238.88:<0.26799.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:38.259,ns_1@10.242.238.88:<0.26799.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 501 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:38.260,ns_1@10.242.238.88:<0.26799.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.260,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:38.267,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:38.267,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26834.0>) [ns_server:debug,2014-08-19T16:49:38.267,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1012) [ns_server:debug,2014-08-19T16:49:38.267,ns_1@10.242.238.88:<0.26835.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.268,ns_1@10.242.238.88:<0.26835.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:38.268,ns_1@10.242.238.88:<0.26834.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1012 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.268,ns_1@10.242.238.88:<0.26840.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1012 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.268,ns_1@10.242.238.88:<0.26841.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1012 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.272,ns_1@10.242.238.88:<0.26842.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1012 into 'ns_1@10.242.238.90' is <18125.19266.0> [ns_server:debug,2014-08-19T16:49:38.275,ns_1@10.242.238.88:<0.26842.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1012 into 'ns_1@10.242.238.91' is <18126.19337.0> [rebalance:debug,2014-08-19T16:49:38.275,ns_1@10.242.238.88:<0.26834.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1012 is <0.26842.0> [ns_server:debug,2014-08-19T16:49:38.351,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 130. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.351,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/130. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",130,active,0} [ns_server:debug,2014-08-19T16:49:38.353,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804,438, 310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254,982, 854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670,542, 176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486,358, 720,592,226,954,826,460,332,694,566,200,928,800,434,306,732,668,604,540,238, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484,420,356,292,718, 654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134, 990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772,470, 406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678, 614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950, 886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430, 366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638, 574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974,910, 846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 
608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424, 296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240,968, 840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528, 162,890,396,268,1018,758,630,136,992,864,498,370] [ns_server:debug,2014-08-19T16:49:38.366,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,357590}, tap_estimate, {replica_building,"default",1012,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19266.0>, <<"replication_building_1012_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:38.384,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,375770}, tap_estimate, {replica_building,"default",1012,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19337.0>, <<"replication_building_1012_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.385,ns_1@10.242.238.88:<0.26843.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19337.0>}, {'ns_1@10.242.238.90',<18125.19266.0>}]) [rebalance:info,2014-08-19T16:49:38.385,ns_1@10.242.238.88:<0.26834.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:38.385,ns_1@10.242.238.88:<0.26834.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1012 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:38.386,ns_1@10.242.238.88:<0.26834.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.387,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:38.391,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:38.391,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26855.0>) [ns_server:debug,2014-08-19T16:49:38.391,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 756) [ns_server:debug,2014-08-19T16:49:38.392,ns_1@10.242.238.88:<0.26856.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.392,ns_1@10.242.238.88:<0.26856.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:38.392,ns_1@10.242.238.88:<0.26855.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 756 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.392,ns_1@10.242.238.88:<0.26861.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 756 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.392,ns_1@10.242.238.88:<0.26862.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 756 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.396,ns_1@10.242.238.88:<0.26863.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 756 into 'ns_1@10.242.238.91' is <18126.19343.0> [ns_server:debug,2014-08-19T16:49:38.398,ns_1@10.242.238.88:<0.26863.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 756 into 'ns_1@10.242.238.90' is <18125.19271.0> [rebalance:debug,2014-08-19T16:49:38.398,ns_1@10.242.238.88:<0.26855.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 756 is <0.26863.0> [ns_server:info,2014-08-19T16:49:38.422,ns_1@10.242.238.88:ns_doctor<0.20988.0>:ns_doctor:update_status:241]The following buckets became ready on node 'ns_1@10.242.238.90': ["default"] [views:debug,2014-08-19T16:49:38.427,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/130. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.428,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",130,active,0} [ns_server:debug,2014-08-19T16:49:38.490,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,481002}, tap_estimate, {replica_building,"default",756,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19343.0>, <<"replication_building_756_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.509,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,500118}, tap_estimate, {replica_building,"default",756,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19271.0>, <<"replication_building_756_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:38.509,ns_1@10.242.238.88:<0.26864.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19271.0>}, {'ns_1@10.242.238.91',<18126.19343.0>}]) [rebalance:info,2014-08-19T16:49:38.509,ns_1@10.242.238.88:<0.26855.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:38.510,ns_1@10.242.238.88:<0.26855.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 756 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:38.510,ns_1@10.242.238.88:<0.26855.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.511,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:38.516,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:38.517,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26891.0>) [ns_server:debug,2014-08-19T16:49:38.517,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 500) [ns_server:debug,2014-08-19T16:49:38.517,ns_1@10.242.238.88:<0.26892.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.517,ns_1@10.242.238.88:<0.26892.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:38.517,ns_1@10.242.238.88:<0.26891.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 500 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.518,ns_1@10.242.238.88:<0.26897.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 500 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.518,ns_1@10.242.238.88:<0.26898.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 500 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.521,ns_1@10.242.238.88:<0.26899.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 500 into 'ns_1@10.242.238.91' is <18126.19362.0> [ns_server:debug,2014-08-19T16:49:38.523,ns_1@10.242.238.88:<0.26899.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 500 into 'ns_1@10.242.238.89' is <18124.25240.0> [rebalance:debug,2014-08-19T16:49:38.523,ns_1@10.242.238.88:<0.26891.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 500 is <0.26899.0> [ns_server:debug,2014-08-19T16:49:38.577,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 128. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.577,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/128. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.577,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",128,active,0} [ns_server:debug,2014-08-19T16:49:38.578,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308,670, 542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852,486, 358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,732,668,604,540, 238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812, 510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484,420,356,292, 718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198, 134,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772, 470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742, 678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222,158, 950,886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494, 430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702, 638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656, 528,162,890,396,268,1018,758,630,136,992,864,498,370] [ns_server:debug,2014-08-19T16:49:38.615,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,606149}, tap_estimate, {replica_building,"default",500,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19362.0>, <<"replication_building_500_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.634,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,625009}, tap_estimate, {replica_building,"default",500,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25240.0>, <<"replication_building_500_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:38.634,ns_1@10.242.238.88:<0.26900.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25240.0>}, {'ns_1@10.242.238.91',<18126.19362.0>}]) [rebalance:info,2014-08-19T16:49:38.634,ns_1@10.242.238.88:<0.26891.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:38.635,ns_1@10.242.238.88:<0.26891.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 500 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:38.635,ns_1@10.242.238.88:<0.26891.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.636,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:38.641,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:38.641,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.26912.0>) [ns_server:debug,2014-08-19T16:49:38.641,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1011) [ns_server:debug,2014-08-19T16:49:38.641,ns_1@10.242.238.88:<0.26913.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.642,ns_1@10.242.238.88:<0.26913.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:38.642,ns_1@10.242.238.88:<0.26912.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1011 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.642,ns_1@10.242.238.88:<0.26918.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1011 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.642,ns_1@10.242.238.88:<0.26919.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1011 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.645,ns_1@10.242.238.88:<0.26920.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1011 into 'ns_1@10.242.238.90' is <18125.19301.0> [ns_server:debug,2014-08-19T16:49:38.647,ns_1@10.242.238.88:<0.26920.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1011 into 'ns_1@10.242.238.91' is <18126.19374.0> [rebalance:debug,2014-08-19T16:49:38.647,ns_1@10.242.238.88:<0.26912.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1011 is <0.26920.0> [views:debug,2014-08-19T16:49:38.652,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/128. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.653,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",128,active,0} [ns_server:debug,2014-08-19T16:49:38.740,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,731033}, tap_estimate, {replica_building,"default",1011,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19301.0>, <<"replication_building_1011_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:38.757,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,748420}, tap_estimate, {replica_building,"default",1011,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19374.0>, <<"replication_building_1011_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.757,ns_1@10.242.238.88:<0.26921.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19374.0>}, {'ns_1@10.242.238.90',<18125.19301.0>}]) [rebalance:info,2014-08-19T16:49:38.758,ns_1@10.242.238.88:<0.26912.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:38.758,ns_1@10.242.238.88:<0.26912.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1011 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:38.759,ns_1@10.242.238.88:<0.26912.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.759,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:38.764,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:38.764,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.26947.0>) [ns_server:debug,2014-08-19T16:49:38.764,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 755) [ns_server:debug,2014-08-19T16:49:38.764,ns_1@10.242.238.88:<0.26948.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.764,ns_1@10.242.238.88:<0.26948.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:38.764,ns_1@10.242.238.88:<0.26947.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 755 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.765,ns_1@10.242.238.88:<0.26953.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 755 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.765,ns_1@10.242.238.88:<0.26954.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 755 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.769,ns_1@10.242.238.88:<0.26955.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 755 into 'ns_1@10.242.238.91' is <18126.19396.0> [ns_server:debug,2014-08-19T16:49:38.771,ns_1@10.242.238.88:<0.26955.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 755 into 'ns_1@10.242.238.90' is <18125.19320.0> [rebalance:debug,2014-08-19T16:49:38.772,ns_1@10.242.238.88:<0.26947.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 755 is <0.26955.0> [ns_server:debug,2014-08-19T16:49:38.829,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 126. Nacking mccouch update. [views:debug,2014-08-19T16:49:38.829,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/126. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.830,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",126,active,0} [ns_server:debug,2014-08-19T16:49:38.831,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,980,852, 486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,732,668,604, 540,238,174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876, 812,510,446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484,420,356, 292,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564, 198,134,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836, 772,470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002, 742,678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998, 934,870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414, 350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686, 558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502, 
374,736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918, 790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606, 240,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294, 656,528,162,890,396,268,1018,758,630,136,992,864,498,370] [ns_server:debug,2014-08-19T16:49:38.863,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,854096}, tap_estimate, {replica_building,"default",755,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19396.0>, <<"replication_building_755_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:38.881,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,872816}, tap_estimate, {replica_building,"default",755,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19320.0>, <<"replication_building_755_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:38.882,ns_1@10.242.238.88:<0.26956.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19320.0>}, {'ns_1@10.242.238.91',<18126.19396.0>}]) [rebalance:info,2014-08-19T16:49:38.882,ns_1@10.242.238.88:<0.26947.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:38.883,ns_1@10.242.238.88:<0.26947.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 755 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:38.883,ns_1@10.242.238.88:<0.26947.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:38.884,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:38.888,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:38.888,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.26968.0>) [ns_server:debug,2014-08-19T16:49:38.889,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 499) [ns_server:debug,2014-08-19T16:49:38.889,ns_1@10.242.238.88:<0.26969.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:38.889,ns_1@10.242.238.88:<0.26969.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:38.889,ns_1@10.242.238.88:<0.26968.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 499 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:38.889,ns_1@10.242.238.88:<0.26974.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 499 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:38.890,ns_1@10.242.238.88:<0.26975.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 499 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:38.896,ns_1@10.242.238.88:<0.26976.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 499 into 'ns_1@10.242.238.91' is <18126.19415.0> [ns_server:debug,2014-08-19T16:49:38.900,ns_1@10.242.238.88:<0.26976.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 499 into 'ns_1@10.242.238.89' is <18124.25260.0> [rebalance:debug,2014-08-19T16:49:38.900,ns_1@10.242.238.88:<0.26968.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 499 is <0.26976.0> [views:debug,2014-08-19T16:49:38.905,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/126. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:38.905,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",126,active,0} [ns_server:debug,2014-08-19T16:49:38.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,980307}, tap_estimate, {replica_building,"default",499,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19415.0>, <<"replication_building_499_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.007,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452578,998953}, tap_estimate, {replica_building,"default",499,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25260.0>, <<"replication_building_499_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:39.008,ns_1@10.242.238.88:<0.26977.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25260.0>}, {'ns_1@10.242.238.91',<18126.19415.0>}]) [rebalance:info,2014-08-19T16:49:39.008,ns_1@10.242.238.88:<0.26968.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:39.009,ns_1@10.242.238.88:<0.26968.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 499 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.009,ns_1@10.242.238.88:<0.26968.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.010,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:39.014,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:49:39.014,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27003.0>) [ns_server:debug,2014-08-19T16:49:39.015,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1010) [ns_server:debug,2014-08-19T16:49:39.015,ns_1@10.242.238.88:<0.27004.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.015,ns_1@10.242.238.88:<0.27004.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:39.015,ns_1@10.242.238.88:<0.27003.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1010 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.015,ns_1@10.242.238.88:<0.27009.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1010 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.015,ns_1@10.242.238.88:<0.27010.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1010 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.019,ns_1@10.242.238.88:<0.27011.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1010 into 'ns_1@10.242.238.90' is <18125.19354.0> [ns_server:debug,2014-08-19T16:49:39.022,ns_1@10.242.238.88:<0.27011.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1010 into 'ns_1@10.242.238.91' is <18126.19434.0> [rebalance:debug,2014-08-19T16:49:39.022,ns_1@10.242.238.88:<0.27003.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1010 is <0.27011.0> [ns_server:debug,2014-08-19T16:49:39.047,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 124. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.047,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/124. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.047,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",124,active,0} [ns_server:debug,2014-08-19T16:49:39.048,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,978,914,850,786,484,420,356,292,718, 654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134, 990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772,470, 406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678, 614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950, 886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430, 366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638, 574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974,910, 846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790,424, 296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240,968, 840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656,528, 162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238] [views:debug,2014-08-19T16:49:39.090,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/124. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.091,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",124,active,0} [ns_server:debug,2014-08-19T16:49:39.113,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,104204}, tap_estimate, {replica_building,"default",1010,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19354.0>, <<"replication_building_1010_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:39.130,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,121934}, tap_estimate, {replica_building,"default",1010,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19434.0>, <<"replication_building_1010_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.131,ns_1@10.242.238.88:<0.27012.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19434.0>}, {'ns_1@10.242.238.90',<18125.19354.0>}]) [rebalance:info,2014-08-19T16:49:39.131,ns_1@10.242.238.88:<0.27003.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:39.132,ns_1@10.242.238.88:<0.27003.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1010 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.132,ns_1@10.242.238.88:<0.27003.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.133,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:39.138,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:39.138,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27038.0>) [ns_server:debug,2014-08-19T16:49:39.138,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 754) [ns_server:debug,2014-08-19T16:49:39.138,ns_1@10.242.238.88:<0.27039.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.138,ns_1@10.242.238.88:<0.27039.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:39.138,ns_1@10.242.238.88:<0.27038.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 754 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.139,ns_1@10.242.238.88:<0.27044.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 754 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.139,ns_1@10.242.238.88:<0.27045.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 754 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.144,ns_1@10.242.238.88:<0.27046.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 754 into 'ns_1@10.242.238.91' is <18126.19454.0> [ns_server:debug,2014-08-19T16:49:39.148,ns_1@10.242.238.88:<0.27046.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 754 into 'ns_1@10.242.238.90' is <18125.19373.0> [rebalance:debug,2014-08-19T16:49:39.148,ns_1@10.242.238.88:<0.27038.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 754 is <0.27046.0> [ns_server:info,2014-08-19T16:49:39.155,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:49:39.166,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 122. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.166,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/122. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.166,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",122,active,0} [ns_server:debug,2014-08-19T16:49:39.167,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,122,978,914,850,786,484,420,356,292, 718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198, 134,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772, 470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742, 678,614,550,248,184,976,912,848,784,482,418,354,290,716,652,588,524,222,158, 950,886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494, 430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702, 638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414,350, 
286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656, 528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238] [views:debug,2014-08-19T16:49:39.200,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/122. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.200,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",122,active,0} [ns_server:debug,2014-08-19T16:49:39.240,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,231250}, tap_estimate, {replica_building,"default",754,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19454.0>, <<"replication_building_754_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.257,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,248432}, tap_estimate, {replica_building,"default",754,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19373.0>, <<"replication_building_754_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:39.258,ns_1@10.242.238.88:<0.27047.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19373.0>}, {'ns_1@10.242.238.91',<18126.19454.0>}]) [rebalance:info,2014-08-19T16:49:39.258,ns_1@10.242.238.88:<0.27038.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:39.258,ns_1@10.242.238.88:<0.27038.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 754 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.259,ns_1@10.242.238.88:<0.27038.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.259,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:39.264,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:39.264,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27078.0>) [ns_server:debug,2014-08-19T16:49:39.264,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 498) [ns_server:debug,2014-08-19T16:49:39.265,ns_1@10.242.238.88:<0.27079.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.265,ns_1@10.242.238.88:<0.27079.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:39.265,ns_1@10.242.238.88:<0.27078.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 498 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.265,ns_1@10.242.238.88:<0.27084.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 498 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.265,ns_1@10.242.238.88:<0.27085.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 498 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.270,ns_1@10.242.238.88:<0.27086.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 498 into 'ns_1@10.242.238.91' is <18126.19459.0> [ns_server:debug,2014-08-19T16:49:39.272,ns_1@10.242.238.88:<0.27086.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 498 into 'ns_1@10.242.238.89' is <18124.25281.0> [rebalance:debug,2014-08-19T16:49:39.272,ns_1@10.242.238.88:<0.27078.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 498 is <0.27086.0> [ns_server:debug,2014-08-19T16:49:39.275,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 120. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.275,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/120. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.276,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",120,active,0} [ns_server:debug,2014-08-19T16:49:39.276,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,122,978,914,850,786,484,420,356,292, 718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198, 134,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772, 470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742, 678,614,550,248,184,120,976,912,848,784,482,418,354,290,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998, 934,870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478,414, 350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686, 558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502, 
374,736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918, 790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606, 240,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294, 656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238] [views:debug,2014-08-19T16:49:39.309,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/120. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.309,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",120,active,0} [ns_server:debug,2014-08-19T16:49:39.363,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,354967}, tap_estimate, {replica_building,"default",498,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19459.0>, <<"replication_building_498_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.381,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,372501}, tap_estimate, {replica_building,"default",498,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25281.0>, <<"replication_building_498_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:39.382,ns_1@10.242.238.88:<0.27087.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25281.0>}, {'ns_1@10.242.238.91',<18126.19459.0>}]) [rebalance:info,2014-08-19T16:49:39.382,ns_1@10.242.238.88:<0.27078.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:39.382,ns_1@10.242.238.88:<0.27078.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 498 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.383,ns_1@10.242.238.88:<0.27078.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.383,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:39.384,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 118. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.384,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/118. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.384,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",118,active,0} [ns_server:debug,2014-08-19T16:49:39.385,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,122,978,914,850,786,484,420,356,292, 718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198, 134,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772, 470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742, 678,614,550,248,184,120,976,912,848,784,482,418,354,290,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 118,974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726, 662,598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,972,908,844,780,478, 414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868, 502,374,736,608,242,970,842,476,348,710,582,216,944,816,450,322,684,556,190, 918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734, 606,240,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422, 294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238] [ns_server:debug,2014-08-19T16:49:39.388,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:39.388,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27113.0>) [ns_server:debug,2014-08-19T16:49:39.388,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1009) [ns_server:debug,2014-08-19T16:49:39.389,ns_1@10.242.238.88:<0.27114.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.389,ns_1@10.242.238.88:<0.27114.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:39.389,ns_1@10.242.238.88:<0.27113.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1009 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.389,ns_1@10.242.238.88:<0.27119.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1009 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.389,ns_1@10.242.238.88:<0.27120.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1009 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.394,ns_1@10.242.238.88:<0.27121.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1009 into 'ns_1@10.242.238.90' is <18125.19393.0> [ns_server:debug,2014-08-19T16:49:39.395,ns_1@10.242.238.88:<0.27121.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1009 into 'ns_1@10.242.238.91' is <18126.19478.0> [rebalance:debug,2014-08-19T16:49:39.395,ns_1@10.242.238.88:<0.27113.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1009 is <0.27121.0> [views:debug,2014-08-19T16:49:39.443,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/118. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.443,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",118,active,0} [ns_server:debug,2014-08-19T16:49:39.487,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,478355}, tap_estimate, {replica_building,"default",1009,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19393.0>, <<"replication_building_1009_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:39.505,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,496089}, tap_estimate, {replica_building,"default",1009,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19478.0>, <<"replication_building_1009_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.505,ns_1@10.242.238.88:<0.27122.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19478.0>}, {'ns_1@10.242.238.90',<18125.19393.0>}]) [rebalance:info,2014-08-19T16:49:39.505,ns_1@10.242.238.88:<0.27113.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:39.506,ns_1@10.242.238.88:<0.27113.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1009 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.506,ns_1@10.242.238.88:<0.27113.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.507,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:39.511,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:49:39.511,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27148.0>) [ns_server:debug,2014-08-19T16:49:39.511,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 753) [ns_server:debug,2014-08-19T16:49:39.512,ns_1@10.242.238.88:<0.27149.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.512,ns_1@10.242.238.88:<0.27149.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:39.512,ns_1@10.242.238.88:<0.27148.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 753 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.512,ns_1@10.242.238.88:<0.27154.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 753 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.512,ns_1@10.242.238.88:<0.27155.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 753 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.518,ns_1@10.242.238.88:<0.27156.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 753 into 'ns_1@10.242.238.91' is <18126.19484.0> [ns_server:debug,2014-08-19T16:49:39.520,ns_1@10.242.238.88:<0.27156.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 753 into 'ns_1@10.242.238.90' is <18125.19419.0> [rebalance:debug,2014-08-19T16:49:39.520,ns_1@10.242.238.88:<0.27148.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 753 is <0.27156.0> [ns_server:debug,2014-08-19T16:49:39.535,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 116. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.535,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/116. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.535,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",116,active,0} [ns_server:debug,2014-08-19T16:49:39.536,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,966,902,838,774,472,408,344,280,706,642,578,514,212,148,940,876,812,510, 446,382,318,1004,744,680,616,552,250,186,122,978,914,850,786,484,420,356,292, 718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198, 134,990,926,862,798,496,432,368,304,730,666,602,538,236,172,964,900,836,772, 470,406,342,278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742, 678,614,550,248,184,120,976,912,848,784,482,418,354,290,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796, 494,430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766, 702,638,574,208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182, 118,974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726, 662,598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140, 996,868,502,374,736,608,242,970,842,476,348,710,582,216,944,816,450,322,684, 556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500, 372,734,606,240,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916, 788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604, 238] [views:debug,2014-08-19T16:49:39.585,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/116. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.586,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",116,active,0} [ns_server:debug,2014-08-19T16:49:39.612,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,603145}, tap_estimate, {replica_building,"default",753,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19484.0>, <<"replication_building_753_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.629,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,620743}, tap_estimate, {replica_building,"default",753,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19419.0>, <<"replication_building_753_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:39.630,ns_1@10.242.238.88:<0.27157.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19419.0>}, {'ns_1@10.242.238.91',<18126.19484.0>}]) [rebalance:info,2014-08-19T16:49:39.630,ns_1@10.242.238.88:<0.27148.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:39.631,ns_1@10.242.238.88:<0.27148.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 753 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.631,ns_1@10.242.238.88:<0.27148.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.632,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:39.638,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:39.638,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27169.0>) [ns_server:debug,2014-08-19T16:49:39.638,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 497) [ns_server:debug,2014-08-19T16:49:39.638,ns_1@10.242.238.88:<0.27170.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.638,ns_1@10.242.238.88:<0.27170.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:39.638,ns_1@10.242.238.88:<0.27169.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 497 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.639,ns_1@10.242.238.88:<0.27175.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 497 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.639,ns_1@10.242.238.88:<0.27176.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 497 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.643,ns_1@10.242.238.88:<0.27177.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 497 into 'ns_1@10.242.238.91' is <18126.19503.0> [ns_server:debug,2014-08-19T16:49:39.646,ns_1@10.242.238.88:<0.27177.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 497 into 'ns_1@10.242.238.89' is <18124.25301.0> [rebalance:debug,2014-08-19T16:49:39.646,ns_1@10.242.238.88:<0.27169.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 497 is <0.27177.0> [ns_server:debug,2014-08-19T16:49:39.730,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 114. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.730,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/114. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.731,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",114,active,0} [ns_server:debug,2014-08-19T16:49:39.732,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,706,642,578,514,212,148,940,876,812,510,446,382,318,1004, 744,680,616,552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526, 224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862, 798,496,432,368,304,730,666,602,538,236,172,964,900,836,772,470,406,342,278, 704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678,614,550,248, 184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846, 782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012, 752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 
608,242,114,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294,656, 528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238,966,838,472, 344] [ns_server:debug,2014-08-19T16:49:39.736,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,727898}, tap_estimate, {replica_building,"default",497,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19503.0>, <<"replication_building_497_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.756,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,747577}, tap_estimate, {replica_building,"default",497,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25301.0>, <<"replication_building_497_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:39.757,ns_1@10.242.238.88:<0.27178.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25301.0>}, {'ns_1@10.242.238.91',<18126.19503.0>}]) [rebalance:info,2014-08-19T16:49:39.757,ns_1@10.242.238.88:<0.27169.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:39.757,ns_1@10.242.238.88:<0.27169.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 497 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.758,ns_1@10.242.238.88:<0.27169.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.758,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:39.763,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:39.763,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27204.0>) [ns_server:debug,2014-08-19T16:49:39.763,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1008) [ns_server:debug,2014-08-19T16:49:39.764,ns_1@10.242.238.88:<0.27205.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.764,ns_1@10.242.238.88:<0.27205.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:39.764,ns_1@10.242.238.88:<0.27204.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1008 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.764,ns_1@10.242.238.88:<0.27210.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1008 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.764,ns_1@10.242.238.88:<0.27211.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1008 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.768,ns_1@10.242.238.88:<0.27212.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1008 into 'ns_1@10.242.238.90' is <18125.19430.0> [ns_server:debug,2014-08-19T16:49:39.769,ns_1@10.242.238.88:<0.27212.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1008 into 'ns_1@10.242.238.91' is <18126.19522.0> [rebalance:debug,2014-08-19T16:49:39.770,ns_1@10.242.238.88:<0.27204.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1008 is <0.27212.0> [views:debug,2014-08-19T16:49:39.797,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/114. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.798,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",114,active,0} [ns_server:debug,2014-08-19T16:49:39.861,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,852509}, tap_estimate, {replica_building,"default",1008,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19430.0>, <<"replication_building_1008_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:39.880,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,871032}, tap_estimate, {replica_building,"default",1008,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19522.0>, <<"replication_building_1008_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:39.880,ns_1@10.242.238.88:<0.27213.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19522.0>}, {'ns_1@10.242.238.90',<18125.19430.0>}]) [rebalance:info,2014-08-19T16:49:39.880,ns_1@10.242.238.88:<0.27204.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:39.881,ns_1@10.242.238.88:<0.27204.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1008 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:39.881,ns_1@10.242.238.88:<0.27204.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:39.882,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:39.886,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
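The entries above repeat the same per-vbucket move sequence: a single vbucket mover is spawned, the move start is noted, replica-building ebucketmigrators are started for the future replicas, tap estimates are observed, backfill is determined, indexing is initiated, and the mover waits for checkpoint 1 on the replicas before "backfill done" is noted. Below is a minimal parsing sketch (Python) for pulling per-vbucket backfill timing out of a flattened log like this one; it assumes the message text appears exactly as in these entries, and the regexes are illustrative, not part of any Couchbase tooling.

```python
# Minimal sketch: pair "Noted vbucket move start" and "noted backfill done"
# records from a flattened ns_server debug log to estimate per-vbucket
# backfill time. Regexes mirror the message text shown in the entries above.
import re
from datetime import datetime

TS = r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3})"
START = re.compile(TS + r".*Noted vbucket move start \(vbucket\s+(\d+)\)", re.S)
DONE = re.compile(TS + r".*noted backfill done: \{move,\{(\d+),", re.S)


def backfill_times(text):
    """Return {vbucket: seconds from 'move start' to 'backfill done'}."""
    parse = lambda ts: datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%f")
    started, durations = {}, {}
    # Split the flattened text into records on the '[component:level,<date>' prefix.
    for record in re.split(r"(?=\[[a-z_]+:[a-z]+,\d{4}-)", text):
        m = START.search(record)
        if m:
            started[int(m.group(2))] = parse(m.group(1))
            continue
        m = DONE.search(record)
        if m and int(m.group(2)) in started:
            vb = int(m.group(2))
            durations[vb] = (parse(m.group(1)) - started[vb]).total_seconds()
    return durations
```

Run over this section, such a script would report roughly 120 ms between move start and backfill done for the moves shown here (e.g. vbucket 753: 16:49:39.511 to 16:49:39.632).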
[rebalance:debug,2014-08-19T16:49:39.886,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27239.0>) [ns_server:debug,2014-08-19T16:49:39.886,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 752) [ns_server:debug,2014-08-19T16:49:39.887,ns_1@10.242.238.88:<0.27240.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:39.887,ns_1@10.242.238.88:<0.27240.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:39.887,ns_1@10.242.238.88:<0.27239.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 752 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:39.887,ns_1@10.242.238.88:<0.27245.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 752 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:39.887,ns_1@10.242.238.88:<0.27246.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 752 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:39.890,ns_1@10.242.238.88:<0.27247.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 752 into 'ns_1@10.242.238.91' is <18126.19528.0> [ns_server:debug,2014-08-19T16:49:39.893,ns_1@10.242.238.88:<0.27247.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 752 into 'ns_1@10.242.238.90' is <18125.19444.0> [rebalance:debug,2014-08-19T16:49:39.893,ns_1@10.242.238.88:<0.27239.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 752 is <0.27247.0> [ns_server:debug,2014-08-19T16:49:39.931,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 112. Nacking mccouch update. [views:debug,2014-08-19T16:49:39.931,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/112. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:39.931,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",112,active,0} [ns_server:debug,2014-08-19T16:49:39.933,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,706,642,578,514,212,148,940,876,812,510,446,382,318,1004, 744,680,616,552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526, 224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862, 798,496,432,368,304,730,666,602,538,236,172,964,900,836,772,470,406,342,278, 704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678,614,550,248, 184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846, 782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012, 752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 608,242,114,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 112,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294, 656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238,966,838, 472,344] [ns_server:debug,2014-08-19T16:49:39.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,975635}, tap_estimate, {replica_building,"default",752,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19528.0>, <<"replication_building_752_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:40.002,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452579,993119}, tap_estimate, {replica_building,"default",752,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19444.0>, <<"replication_building_752_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:40.002,ns_1@10.242.238.88:<0.27248.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19444.0>}, {'ns_1@10.242.238.91',<18126.19528.0>}]) [rebalance:info,2014-08-19T16:49:40.002,ns_1@10.242.238.88:<0.27239.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:40.003,ns_1@10.242.238.88:<0.27239.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 752 on ns_1@10.242.238.88 
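The capi_set_view_manager "Usable vbuckets:" snapshot is re-logged after each set_vbucket event, so comparing consecutive snapshots shows which vbuckets appeared or disappeared between events (here 116, 114, 112, ... show up as they become active). A small sketch for diffing the snapshots, assuming they keep the bracketed comma-separated integer form shown above:

```python
# Sketch: diff consecutive "Usable vbuckets:" snapshots emitted by
# capi_set_view_manager to see which vbuckets were added or dropped.
# Assumes the snapshots are bracketed integer lists as in the entries above.
import re

SNAPSHOT = re.compile(r"Usable vbuckets:\s*\[([\d,\s]+)\]")


def usable_vbucket_diffs(text):
    """Yield (added, removed) sets for each pair of consecutive snapshots."""
    prev = None
    for m in SNAPSHOT.finditer(text):
        current = {int(v) for v in m.group(1).split(",") if v.strip()}
        if prev is not None:
            yield current - prev, prev - current
        prev = current
```

Because the lists are not printed in any stable order, a set difference is used rather than comparing positions.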
[rebalance:info,2014-08-19T16:49:40.003,ns_1@10.242.238.88:<0.27239.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.004,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [views:debug,2014-08-19T16:49:40.007,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/112. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.007,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",112,active,0} [ns_server:debug,2014-08-19T16:49:40.010,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:40.010,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27260.0>) [ns_server:debug,2014-08-19T16:49:40.011,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 496) [ns_server:debug,2014-08-19T16:49:40.011,ns_1@10.242.238.88:<0.27261.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.011,ns_1@10.242.238.88:<0.27261.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:40.011,ns_1@10.242.238.88:<0.27260.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 496 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.012,ns_1@10.242.238.88:<0.27266.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 496 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.012,ns_1@10.242.238.88:<0.27267.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 496 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.015,ns_1@10.242.238.88:<0.27268.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 496 into 'ns_1@10.242.238.91' is <18126.19547.0> [ns_server:debug,2014-08-19T16:49:40.017,ns_1@10.242.238.88:<0.27268.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 496 into 'ns_1@10.242.238.89' is <18124.25345.0> [rebalance:debug,2014-08-19T16:49:40.017,ns_1@10.242.238.88:<0.27260.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 496 is <0.27268.0> [ns_server:debug,2014-08-19T16:49:40.109,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,100765}, tap_estimate, {replica_building,"default",496,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19547.0>, <<"replication_building_496_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:40.128,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,119020}, tap_estimate, 
{replica_building,"default",496,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25345.0>, <<"replication_building_496_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:40.128,ns_1@10.242.238.88:<0.27269.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25345.0>}, {'ns_1@10.242.238.91',<18126.19547.0>}]) [rebalance:info,2014-08-19T16:49:40.128,ns_1@10.242.238.88:<0.27260.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:40.129,ns_1@10.242.238.88:<0.27260.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 496 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:40.129,ns_1@10.242.238.88:<0.27260.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.130,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:40.134,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:40.135,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27295.0>) [ns_server:debug,2014-08-19T16:49:40.135,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1007) [ns_server:debug,2014-08-19T16:49:40.135,ns_1@10.242.238.88:<0.27296.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.135,ns_1@10.242.238.88:<0.27296.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:40.135,ns_1@10.242.238.88:<0.27295.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1007 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.135,ns_1@10.242.238.88:<0.27301.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1007 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.135,ns_1@10.242.238.88:<0.27302.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1007 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.139,ns_1@10.242.238.88:<0.27303.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1007 into 'ns_1@10.242.238.90' is <18125.19464.0> [ns_server:debug,2014-08-19T16:49:40.142,ns_1@10.242.238.88:<0.27303.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1007 into 'ns_1@10.242.238.91' is <18126.19566.0> [rebalance:debug,2014-08-19T16:49:40.142,ns_1@10.242.238.88:<0.27295.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1007 is <0.27303.0> [ns_server:debug,2014-08-19T16:49:40.157,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 110. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.157,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/110. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.157,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",110,active,0} [ns_server:debug,2014-08-19T16:49:40.159,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,706,642,578,514,212,148,940,876,812,510,446,382,318,1004, 744,680,616,552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526, 224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862, 798,496,432,368,304,730,666,602,538,236,172,964,900,836,772,470,406,342,278, 704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678,614,550,248, 184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846, 782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012, 752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 
608,242,114,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 112,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422,294, 656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238,110,966, 838,472,344] [ns_server:debug,2014-08-19T16:49:40.234,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,225355}, tap_estimate, {replica_building,"default",1007,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19464.0>, <<"replication_building_1007_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:49:40.241,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/110. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.241,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",110,active,0} [ns_server:debug,2014-08-19T16:49:40.253,ns_1@10.242.238.88:<0.27304.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19566.0>}, {'ns_1@10.242.238.90',<18125.19464.0>}]) [rebalance:info,2014-08-19T16:49:40.253,ns_1@10.242.238.88:<0.27295.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:40.253,ns_1@10.242.238.88:<0.27295.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1007 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:40.254,ns_1@10.242.238.88:<0.27295.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.254,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:40.259,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:40.259,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27316.0>) [ns_server:debug,2014-08-19T16:49:40.259,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,243499}, tap_estimate, {replica_building,"default",1007,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19566.0>, <<"replication_building_1007_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:40.259,ns_1@10.242.238.88:<0.27317.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.259,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 751) [ns_server:debug,2014-08-19T16:49:40.259,ns_1@10.242.238.88:<0.27317.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:40.259,ns_1@10.242.238.88:<0.27316.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 751 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.260,ns_1@10.242.238.88:<0.27322.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 751 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.260,ns_1@10.242.238.88:<0.27323.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 751 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.265,ns_1@10.242.238.88:<0.27324.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 751 into 'ns_1@10.242.238.91' is <18126.19586.0> [ns_server:debug,2014-08-19T16:49:40.266,ns_1@10.242.238.88:<0.27324.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 751 into 'ns_1@10.242.238.90' is <18125.19483.0> [rebalance:debug,2014-08-19T16:49:40.266,ns_1@10.242.238.88:<0.27316.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 751 is <0.27324.0> [ns_server:debug,2014-08-19T16:49:40.359,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,350449}, tap_estimate, {replica_building,"default",751,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19586.0>, <<"replication_building_751_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:40.378,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,369672}, tap_estimate, {replica_building,"default",751,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19483.0>, <<"replication_building_751_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:40.379,ns_1@10.242.238.88:<0.27325.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19483.0>}, {'ns_1@10.242.238.91',<18126.19586.0>}]) [rebalance:info,2014-08-19T16:49:40.379,ns_1@10.242.238.88:<0.27316.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:40.379,ns_1@10.242.238.88:<0.27316.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 751 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:40.380,ns_1@10.242.238.88:<0.27316.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.380,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:40.385,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:40.385,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27351.0>) 
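Each tap_estimate entry from ns_rebalance_observer carries a replica_building tuple (bucket, vbucket, source node, destination node) plus an estimated item count, which is 0 for every build in this section, consistent with the sub-second backfills noted earlier. A sketch that collects these tuples; the regex mirrors the terms printed above, and the hard-coded "default" bucket name matches these entries only:

```python
# Sketch: collect ns_rebalance_observer tap_estimate records into
# (vbucket, destination_node, estimated_items) tuples. Illustrative only;
# the pattern mirrors the replica_building terms shown in this log section.
import re

TAP_ESTIMATE = re.compile(
    r"\{replica_building,\"default\",(\d+),'[^']+',\s*'([^']+)'\},\s*(\d+),"
)


def tap_estimates(text):
    return [(int(vb), dest, int(est)) for vb, dest, est in TAP_ESTIMATE.findall(text)]
```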
[ns_server:debug,2014-08-19T16:49:40.385,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 495) [ns_server:debug,2014-08-19T16:49:40.386,ns_1@10.242.238.88:<0.27352.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.386,ns_1@10.242.238.88:<0.27352.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:40.386,ns_1@10.242.238.88:<0.27351.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 495 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.386,ns_1@10.242.238.88:<0.27357.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 495 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.386,ns_1@10.242.238.88:<0.27358.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 495 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.392,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 108. Nacking mccouch update. [ns_server:debug,2014-08-19T16:49:40.392,ns_1@10.242.238.88:<0.27359.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 495 into 'ns_1@10.242.238.91' is <18126.19605.0> [views:debug,2014-08-19T16:49:40.392,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/108. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.392,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",108,active,0} [ns_server:debug,2014-08-19T16:49:40.393,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,706,642,578,514,212,148,940,876,812,510,446,382,318,1004, 744,680,616,552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526, 224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862, 798,496,432,368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342, 278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678,614,550, 248,184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886, 822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366, 302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574, 208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910, 846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 
286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,114,970,842,476,348,710,582,216,944,816,450,322,684,556,190,918, 790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606, 240,112,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788,422, 294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238,110, 966,838,472,344] [ns_server:debug,2014-08-19T16:49:40.394,ns_1@10.242.238.88:<0.27359.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 495 into 'ns_1@10.242.238.89' is <18124.25365.0> [rebalance:debug,2014-08-19T16:49:40.394,ns_1@10.242.238.88:<0.27351.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 495 is <0.27359.0> [views:debug,2014-08-19T16:49:40.467,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/108. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.467,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",108,active,0} [ns_server:debug,2014-08-19T16:49:40.486,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,477353}, tap_estimate, {replica_building,"default",495,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19605.0>, <<"replication_building_495_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:40.503,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,494855}, tap_estimate, {replica_building,"default",495,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25365.0>, <<"replication_building_495_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:40.504,ns_1@10.242.238.88:<0.27360.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25365.0>}, {'ns_1@10.242.238.91',<18126.19605.0>}]) [rebalance:info,2014-08-19T16:49:40.504,ns_1@10.242.238.88:<0.27351.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:40.505,ns_1@10.242.238.88:<0.27351.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 495 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:40.505,ns_1@10.242.238.88:<0.27351.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.506,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:40.510,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:40.510,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27372.0>) [ns_server:debug,2014-08-19T16:49:40.510,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 
1006) [ns_server:debug,2014-08-19T16:49:40.511,ns_1@10.242.238.88:<0.27373.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.511,ns_1@10.242.238.88:<0.27373.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:40.511,ns_1@10.242.238.88:<0.27372.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1006 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.511,ns_1@10.242.238.88:<0.27378.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1006 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.511,ns_1@10.242.238.88:<0.27379.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1006 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.518,ns_1@10.242.238.88:<0.27380.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1006 into 'ns_1@10.242.238.90' is <18125.19517.0> [ns_server:debug,2014-08-19T16:49:40.520,ns_1@10.242.238.88:<0.27380.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1006 into 'ns_1@10.242.238.91' is <18126.19610.0> [rebalance:debug,2014-08-19T16:49:40.520,ns_1@10.242.238.88:<0.27372.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1006 is <0.27380.0> [ns_server:debug,2014-08-19T16:49:40.577,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1021. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.577,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1021. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.577,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1021,active,0} [ns_server:debug,2014-08-19T16:49:40.578,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,706,642,578,514,212,148,940,876,812,510,446,382,318,1004, 744,680,616,552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526, 224,160,952,888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862, 798,496,432,368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342, 278,704,640,576,512,210,146,938,874,810,508,444,380,316,1002,742,678,614,550, 248,184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886, 822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366, 302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574, 208,144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910, 846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870, 806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556,190, 918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734, 606,240,112,968,840,474,346,708,580,214,942,814,448,320,682,554,188,916,788, 422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238, 110,966,838,472,344] [ns_server:debug,2014-08-19T16:49:40.612,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,603547}, tap_estimate, {replica_building,"default",1006,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19517.0>, <<"replication_building_1006_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:49:40.619,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1021. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.620,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1021,active,0} [ns_server:debug,2014-08-19T16:49:40.631,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,622883}, tap_estimate, {replica_building,"default",1006,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19610.0>, <<"replication_building_1006_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:40.632,ns_1@10.242.238.88:<0.27381.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19610.0>}, {'ns_1@10.242.238.90',<18125.19517.0>}]) [rebalance:info,2014-08-19T16:49:40.632,ns_1@10.242.238.88:<0.27372.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:40.633,ns_1@10.242.238.88:<0.27372.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1006 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:40.633,ns_1@10.242.238.88:<0.27372.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.634,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:40.638,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:40.639,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27407.0>) [ns_server:debug,2014-08-19T16:49:40.639,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 750) [ns_server:debug,2014-08-19T16:49:40.639,ns_1@10.242.238.88:<0.27408.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.639,ns_1@10.242.238.88:<0.27408.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:40.639,ns_1@10.242.238.88:<0.27407.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 750 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.640,ns_1@10.242.238.88:<0.27413.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 750 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.640,ns_1@10.242.238.88:<0.27414.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 750 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.643,ns_1@10.242.238.88:<0.27415.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 750 into 'ns_1@10.242.238.91' is <18126.19636.0> [ns_server:debug,2014-08-19T16:49:40.645,ns_1@10.242.238.88:<0.27415.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 750 into 'ns_1@10.242.238.90' is <18125.19536.0> [rebalance:debug,2014-08-19T16:49:40.645,ns_1@10.242.238.88:<0.27407.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 750 is <0.27415.0> [ns_server:debug,2014-08-19T16:49:40.719,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1019. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.720,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1019. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.720,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1019,active,0} [ns_server:debug,2014-08-19T16:49:40.721,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,642,514,148,940,876,812,510,446,382,318,1004,744,680,616, 552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952, 888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432, 368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640, 576,512,210,146,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302,728,664, 600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,936, 872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846,782,480, 416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,752,688, 624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806,504,440, 376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792, 426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736,608,242, 
114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556,190,918,790,424, 296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240,112, 968,840,474,346,708,580,214,1019,942,814,448,320,682,554,188,916,788,422,294, 656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238,110,966, 838,472,344,706,578,212] [ns_server:debug,2014-08-19T16:49:40.737,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,727998}, tap_estimate, {replica_building,"default",750,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19636.0>, <<"replication_building_750_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:49:40.754,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1019. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.754,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1019,active,0} [ns_server:debug,2014-08-19T16:49:40.755,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,746389}, tap_estimate, {replica_building,"default",750,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19536.0>, <<"replication_building_750_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:40.755,ns_1@10.242.238.88:<0.27416.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19536.0>}, {'ns_1@10.242.238.91',<18126.19636.0>}]) [rebalance:info,2014-08-19T16:49:40.756,ns_1@10.242.238.88:<0.27407.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:40.756,ns_1@10.242.238.88:<0.27407.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 750 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:40.757,ns_1@10.242.238.88:<0.27407.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.757,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:40.762,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:40.763,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27442.0>) [ns_server:debug,2014-08-19T16:49:40.763,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 494) [ns_server:debug,2014-08-19T16:49:40.763,ns_1@10.242.238.88:<0.27443.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.763,ns_1@10.242.238.88:<0.27443.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:40.763,ns_1@10.242.238.88:<0.27442.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 494 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.764,ns_1@10.242.238.88:<0.27448.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 494 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.764,ns_1@10.242.238.88:<0.27449.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 494 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.769,ns_1@10.242.238.88:<0.27450.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 494 into 'ns_1@10.242.238.91' is <18126.19655.0> [ns_server:debug,2014-08-19T16:49:40.771,ns_1@10.242.238.88:<0.27450.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 494 into 'ns_1@10.242.238.89' is <18124.25385.0> [rebalance:debug,2014-08-19T16:49:40.771,ns_1@10.242.238.88:<0.27442.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 494 is <0.27450.0> [ns_server:debug,2014-08-19T16:49:40.853,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1017. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.854,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1017. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.854,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1017,active,0} [ns_server:debug,2014-08-19T16:49:40.855,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,642,514,148,940,876,812,510,446,382,318,1004,744,680,616, 552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952, 888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432, 368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640, 576,512,210,146,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302,728,664, 600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,936, 872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846,782,480, 416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,752,688, 624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806,504,440, 376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920,792, 426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736,608,242, 
114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556,190,918,790,424, 296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240,112, 968,840,474,346,708,580,214,1019,942,814,448,320,682,554,188,916,788,422,294, 656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238,110,966, 838,472,344,706,578,212,1017] [ns_server:debug,2014-08-19T16:49:40.863,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,854809}, tap_estimate, {replica_building,"default",494,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19655.0>, <<"replication_building_494_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:40.880,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,871752}, tap_estimate, {replica_building,"default",494,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25385.0>, <<"replication_building_494_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:40.881,ns_1@10.242.238.88:<0.27451.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25385.0>}, {'ns_1@10.242.238.91',<18126.19655.0>}]) [rebalance:info,2014-08-19T16:49:40.881,ns_1@10.242.238.88:<0.27442.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:40.882,ns_1@10.242.238.88:<0.27442.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 494 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:40.883,ns_1@10.242.238.88:<0.27442.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:40.883,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [views:debug,2014-08-19T16:49:40.887,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1017. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.887,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:40.888,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27477.0>) [ns_server:debug,2014-08-19T16:49:40.888,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1017,active,0} [ns_server:debug,2014-08-19T16:49:40.888,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1005) [ns_server:debug,2014-08-19T16:49:40.888,ns_1@10.242.238.88:<0.27478.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:40.888,ns_1@10.242.238.88:<0.27478.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:40.888,ns_1@10.242.238.88:<0.27477.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1005 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:40.888,ns_1@10.242.238.88:<0.27483.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1005 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:40.889,ns_1@10.242.238.88:<0.27484.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1005 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:40.893,ns_1@10.242.238.88:<0.27485.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1005 into 'ns_1@10.242.238.90' is <18125.19556.0> [ns_server:debug,2014-08-19T16:49:40.896,ns_1@10.242.238.88:<0.27485.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1005 into 'ns_1@10.242.238.91' is <18126.19675.0> [rebalance:debug,2014-08-19T16:49:40.896,ns_1@10.242.238.88:<0.27477.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1005 is <0.27485.0> [ns_server:debug,2014-08-19T16:49:40.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1015. Nacking mccouch update. [views:debug,2014-08-19T16:49:40.988,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1015. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:40.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1015,active,0} [ns_server:debug,2014-08-19T16:49:40.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,979229}, tap_estimate, {replica_building,"default",1005,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19556.0>, <<"replication_building_1005_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:40.989,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,642,514,148,940,876,812,510,446,382,318,1004,744,680,616, 552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952, 888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432, 368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640, 576,512,210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248, 184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846, 782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012, 
752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556,190,918, 790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606, 240,112,968,840,474,346,708,580,214,1019,942,814,448,320,682,554,188,916,788, 422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238, 110,966,838,472,344,706,578,212,1017] [ns_server:debug,2014-08-19T16:49:41.007,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452580,998838}, tap_estimate, {replica_building,"default",1005,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19675.0>, <<"replication_building_1005_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.008,ns_1@10.242.238.88:<0.27486.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19675.0>}, {'ns_1@10.242.238.90',<18125.19556.0>}]) [rebalance:info,2014-08-19T16:49:41.008,ns_1@10.242.238.88:<0.27477.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:41.009,ns_1@10.242.238.88:<0.27477.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1005 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.009,ns_1@10.242.238.88:<0.27477.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.010,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:41.014,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:41.014,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27512.0>) [ns_server:debug,2014-08-19T16:49:41.015,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 749) [ns_server:debug,2014-08-19T16:49:41.015,ns_1@10.242.238.88:<0.27513.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.015,ns_1@10.242.238.88:<0.27513.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:41.015,ns_1@10.242.238.88:<0.27512.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 749 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.015,ns_1@10.242.238.88:<0.27518.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 749 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.016,ns_1@10.242.238.88:<0.27519.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 749 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.021,ns_1@10.242.238.88:<0.27520.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 749 into 'ns_1@10.242.238.91' is <18126.19695.0> [views:debug,2014-08-19T16:49:41.022,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1015. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.022,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1015,active,0} [ns_server:debug,2014-08-19T16:49:41.024,ns_1@10.242.238.88:<0.27520.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 749 into 'ns_1@10.242.238.90' is <18125.19561.0> [rebalance:debug,2014-08-19T16:49:41.024,ns_1@10.242.238.88:<0.27512.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 749 is <0.27520.0> [ns_server:debug,2014-08-19T16:49:41.088,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1013. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.089,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1013. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.089,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1013,active,0} [ns_server:debug,2014-08-19T16:49:41.090,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,642,514,148,940,876,812,510,446,382,318,1004,744,680,616, 552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952, 888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432, 368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640, 576,512,210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248, 184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414, 350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686, 558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502, 374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556, 190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372, 734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,682,554,188, 916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732, 604,238,110,966,838,472,344,706,578,212,1017] [views:debug,2014-08-19T16:49:41.122,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1013. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.122,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1013,active,0} [ns_server:debug,2014-08-19T16:49:41.125,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,106590}, tap_estimate, {replica_building,"default",749,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19695.0>, <<"replication_building_749_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.133,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,124354}, tap_estimate, {replica_building,"default",749,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19561.0>, <<"replication_building_749_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:41.133,ns_1@10.242.238.88:<0.27521.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19561.0>}, {'ns_1@10.242.238.91',<18126.19695.0>}]) [rebalance:info,2014-08-19T16:49:41.134,ns_1@10.242.238.88:<0.27512.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:41.134,ns_1@10.242.238.88:<0.27512.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 749 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.134,ns_1@10.242.238.88:<0.27512.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.135,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:41.140,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:41.140,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27547.0>) [ns_server:debug,2014-08-19T16:49:41.141,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 493) [ns_server:debug,2014-08-19T16:49:41.141,ns_1@10.242.238.88:<0.27548.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.141,ns_1@10.242.238.88:<0.27548.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:41.141,ns_1@10.242.238.88:<0.27547.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 493 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.142,ns_1@10.242.238.88:<0.27553.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 493 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.142,ns_1@10.242.238.88:<0.27554.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 493 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.148,ns_1@10.242.238.88:<0.27555.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 493 into 'ns_1@10.242.238.91' is <18126.19700.0> [ns_server:debug,2014-08-19T16:49:41.150,ns_1@10.242.238.88:<0.27555.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 493 into 'ns_1@10.242.238.89' is <18124.25405.0> [rebalance:debug,2014-08-19T16:49:41.150,ns_1@10.242.238.88:<0.27547.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 493 is <0.27555.0> [ns_server:debug,2014-08-19T16:49:41.243,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,234164}, tap_estimate, {replica_building,"default",493,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19700.0>, <<"replication_building_493_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.260,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,251801}, tap_estimate, {replica_building,"default",493,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25405.0>, <<"replication_building_493_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:41.261,ns_1@10.242.238.88:<0.27556.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25405.0>}, {'ns_1@10.242.238.91',<18126.19700.0>}]) [rebalance:info,2014-08-19T16:49:41.261,ns_1@10.242.238.88:<0.27547.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:41.262,ns_1@10.242.238.88:<0.27547.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 493 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.262,ns_1@10.242.238.88:<0.27547.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.262,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:41.267,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:41.267,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27587.0>) 
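
The stretch above repeats the same single-vbucket move sequence for vbuckets 750, 494, 1005, 749, 493 and now 1004: a mover process is spawned, the inhibited_view_compaction request is nacked (which the mover logs as normal), replica vbucket states are set, ebucketmigrator replica builders are started on the destination nodes, tap estimates are observed, and backfill is noted as done before the next move action is scheduled. When reading a long capture like this it can help to reduce it to one line per vbucket. The sketch below is only an illustration under stated assumptions: the file name ns_server.debug.log and the helper names are made up for the example, and the regular expressions rely solely on the "Noted vbucket move start" and "noted backfill done" message text visible in the entries above.

```python
#!/usr/bin/env python3
"""Reconstruct per-vbucket move timelines from an ns_server debug log.

Minimal sketch: the path 'ns_server.debug.log' and the function names are
assumptions for illustration; the regexes depend only on the literal
message text shown in the log excerpt above.
"""
import re
from datetime import datetime

TS = r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)"
MOVE_START = re.compile(TS + r",\S+handle_vbucket_move_start:\d+\]"
                             r"Noted vbucket move start \(vbucket (\d+)\)")
BACKFILL_DONE = re.compile(TS + r",\S+on_backfill_done:\d+\]"
                                r"noted backfill done: \{move,\{(\d+),")

def parse_ts(s):
    # Log timestamps look like 2014-08-19T16:49:41.015 (millisecond precision).
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f")

def move_timelines(text):
    """Return {vbucket: (move_start, backfill_done or None)} from raw log text."""
    starts = {int(m.group(2)): parse_ts(m.group(1)) for m in MOVE_START.finditer(text)}
    done = {int(m.group(2)): parse_ts(m.group(1)) for m in BACKFILL_DONE.finditer(text)}
    return {vb: (t0, done.get(vb)) for vb, t0 in starts.items()}

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:          # assumed file name
        for vb, (t0, t1) in sorted(move_timelines(f.read()).items()):
            if t1:
                print(f"vbucket {vb}: backfill done in {(t1 - t0).total_seconds():.3f}s")
            else:
                print(f"vbucket {vb}: move started {t0.time()}, backfill pending")
```

For the moves logged above this would print sub-second backfill times per vbucket, which matches the roughly 0.1–0.3 s gap between each "Noted vbucket move start" and its "noted backfill done" entry.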
[ns_server:debug,2014-08-19T16:49:41.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1011. Nacking mccouch update. [ns_server:debug,2014-08-19T16:49:41.267,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1004) [views:debug,2014-08-19T16:49:41.267,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1011. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.268,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1011,active,0} [ns_server:debug,2014-08-19T16:49:41.268,ns_1@10.242.238.88:<0.27588.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.268,ns_1@10.242.238.88:<0.27588.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:41.268,ns_1@10.242.238.88:<0.27587.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1004 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.268,ns_1@10.242.238.88:<0.27593.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1004 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.268,ns_1@10.242.238.88:<0.27594.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1004 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.269,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,932,804, 438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620,254, 126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436,308, 670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124,980, 852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668,540, 174,902,774,408,280,642,514,148,940,876,812,510,446,382,318,1004,744,680,616, 552,250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952, 888,824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432, 368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640, 576,512,210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248, 184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998, 934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478, 414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868, 502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,684, 
556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500, 372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,682,554, 188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370, 732,604,238,110,966,838,472,344,706,578,212,1017] [ns_server:debug,2014-08-19T16:49:41.272,ns_1@10.242.238.88:<0.27595.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1004 into 'ns_1@10.242.238.90' is <18125.19609.0> [ns_server:debug,2014-08-19T16:49:41.274,ns_1@10.242.238.88:<0.27595.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1004 into 'ns_1@10.242.238.91' is <18126.19719.0> [rebalance:debug,2014-08-19T16:49:41.274,ns_1@10.242.238.88:<0.27587.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1004 is <0.27595.0> [views:debug,2014-08-19T16:49:41.343,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1011. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.343,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1011,active,0} [ns_server:debug,2014-08-19T16:49:41.366,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,357488}, tap_estimate, {replica_building,"default",1004,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19609.0>, <<"replication_building_1004_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:41.384,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,375639}, tap_estimate, {replica_building,"default",1004,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19719.0>, <<"replication_building_1004_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.385,ns_1@10.242.238.88:<0.27596.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19719.0>}, {'ns_1@10.242.238.90',<18125.19609.0>}]) [rebalance:info,2014-08-19T16:49:41.385,ns_1@10.242.238.88:<0.27587.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:41.386,ns_1@10.242.238.88:<0.27587.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1004 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.386,ns_1@10.242.238.88:<0.27587.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.387,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:41.391,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:41.391,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27616.0>) [ns_server:debug,2014-08-19T16:49:41.392,ns_1@10.242.238.88:<0.27617.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for 
inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.392,ns_1@10.242.238.88:<0.27617.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:41.392,ns_1@10.242.238.88:<0.27616.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 748 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.392,ns_1@10.242.238.88:<0.27622.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 748 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.392,ns_1@10.242.238.88:<0.27623.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 748 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.393,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 748) [ns_server:debug,2014-08-19T16:49:41.397,ns_1@10.242.238.88:<0.27624.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 748 into 'ns_1@10.242.238.91' is <18126.19725.0> [ns_server:debug,2014-08-19T16:49:41.400,ns_1@10.242.238.88:<0.27624.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 748 into 'ns_1@10.242.238.90' is <18125.19614.0> [rebalance:debug,2014-08-19T16:49:41.400,ns_1@10.242.238.88:<0.27616.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 748 is <0.27624.0> [ns_server:debug,2014-08-19T16:49:41.490,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,481933}, tap_estimate, {replica_building,"default",748,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19725.0>, <<"replication_building_748_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.501,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1009. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.502,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1009. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.502,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1009,active,0} [ns_server:debug,2014-08-19T16:49:41.503,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,930,802,436, 308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252,124, 980,852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306,668, 540,174,902,774,408,280,642,514,148,876,510,382,1004,744,680,616,552,250,186, 122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458, 394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432,368,304,730, 666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512,210, 146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120,976, 912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302,728,664,600, 536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936, 872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846,782,480, 416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,752,688, 624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192,920, 792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736,608, 242,114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 112,968,840,474,346,708,580,214,1019,942,814,448,320,682,554,188,916,788,422, 294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238,110, 966,838,472,344,706,578,212,1017,940,812,446,318] [ns_server:debug,2014-08-19T16:49:41.510,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,501437}, tap_estimate, {replica_building,"default",748,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19614.0>, <<"replication_building_748_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:41.510,ns_1@10.242.238.88:<0.27625.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19614.0>}, {'ns_1@10.242.238.91',<18126.19725.0>}]) [rebalance:info,2014-08-19T16:49:41.511,ns_1@10.242.238.88:<0.27616.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:41.511,ns_1@10.242.238.88:<0.27616.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 748 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.512,ns_1@10.242.238.88:<0.27616.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.513,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{748, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:41.517,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:41.517,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27654.0>) [ns_server:debug,2014-08-19T16:49:41.518,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 492) [ns_server:debug,2014-08-19T16:49:41.518,ns_1@10.242.238.88:<0.27655.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.518,ns_1@10.242.238.88:<0.27655.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:41.518,ns_1@10.242.238.88:<0.27654.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 492 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.519,ns_1@10.242.238.88:<0.27660.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 492 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.519,ns_1@10.242.238.88:<0.27661.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 492 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.522,ns_1@10.242.238.88:<0.27662.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 492 into 'ns_1@10.242.238.91' is <18126.19744.0> [ns_server:debug,2014-08-19T16:49:41.525,ns_1@10.242.238.88:<0.27662.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 492 into 'ns_1@10.242.238.89' is <18124.25425.0> [rebalance:debug,2014-08-19T16:49:41.525,ns_1@10.242.238.88:<0.27654.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 492 is <0.27662.0> [views:debug,2014-08-19T16:49:41.577,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1009. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.577,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1009,active,0} [ns_server:debug,2014-08-19T16:49:41.617,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,608959}, tap_estimate, {replica_building,"default",492,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19744.0>, <<"replication_building_492_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.634,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,625793}, tap_estimate, {replica_building,"default",492,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25425.0>, <<"replication_building_492_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:41.635,ns_1@10.242.238.88:<0.27663.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25425.0>}, {'ns_1@10.242.238.91',<18126.19744.0>}]) [rebalance:info,2014-08-19T16:49:41.635,ns_1@10.242.238.88:<0.27654.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:41.635,ns_1@10.242.238.88:<0.27654.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 492 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.636,ns_1@10.242.238.88:<0.27654.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.636,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:41.641,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:41.641,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27690.0>) [ns_server:debug,2014-08-19T16:49:41.641,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1003) [ns_server:debug,2014-08-19T16:49:41.641,ns_1@10.242.238.88:<0.27691.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.642,ns_1@10.242.238.88:<0.27691.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:41.642,ns_1@10.242.238.88:<0.27690.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1003 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.642,ns_1@10.242.238.88:<0.27696.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1003 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.642,ns_1@10.242.238.88:<0.27697.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1003 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.646,ns_1@10.242.238.88:<0.27698.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1003 into 'ns_1@10.242.238.90' is <18125.19654.0> [ns_server:debug,2014-08-19T16:49:41.647,ns_1@10.242.238.88:<0.27698.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1003 into 'ns_1@10.242.238.91' is <18126.19760.0> [rebalance:debug,2014-08-19T16:49:41.647,ns_1@10.242.238.88:<0.27690.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1003 is <0.27698.0> [ns_server:debug,2014-08-19T16:49:41.702,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1007. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.702,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1007. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.702,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1007,active,0} [ns_server:debug,2014-08-19T16:49:41.703,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,928,800,434,306, 668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,680,616,552,250, 186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888,824, 458,394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432,368,304, 730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512, 210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302,728,664, 600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013, 936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910,846,782, 480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168, 960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 
608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556,190,918, 790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606, 240,112,968,840,474,346,708,580,214,1019,942,814,448,320,682,554,188,916,788, 422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238, 110,966,838,472,344,706,578,212,1017,940,812,446,318] [ns_server:debug,2014-08-19T16:49:41.741,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,732525}, tap_estimate, {replica_building,"default",1003,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19654.0>, <<"replication_building_1003_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:41.758,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,749125}, tap_estimate, {replica_building,"default",1003,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19760.0>, <<"replication_building_1003_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.758,ns_1@10.242.238.88:<0.27699.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19760.0>}, {'ns_1@10.242.238.90',<18125.19654.0>}]) [rebalance:info,2014-08-19T16:49:41.759,ns_1@10.242.238.88:<0.27690.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:41.759,ns_1@10.242.238.88:<0.27690.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1003 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.760,ns_1@10.242.238.88:<0.27690.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.760,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:41.765,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:41.765,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.27711.0>) [ns_server:debug,2014-08-19T16:49:41.765,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 747) [ns_server:debug,2014-08-19T16:49:41.766,ns_1@10.242.238.88:<0.27712.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.766,ns_1@10.242.238.88:<0.27712.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:41.766,ns_1@10.242.238.88:<0.27711.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 747 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.766,ns_1@10.242.238.88:<0.27717.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 747 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.766,ns_1@10.242.238.88:<0.27718.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 747 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.769,ns_1@10.242.238.88:<0.27719.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 747 into 'ns_1@10.242.238.91' is <18126.19780.0> [ns_server:debug,2014-08-19T16:49:41.772,ns_1@10.242.238.88:<0.27719.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 747 into 'ns_1@10.242.238.90' is <18125.19659.0> [rebalance:debug,2014-08-19T16:49:41.772,ns_1@10.242.238.88:<0.27711.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 747 is <0.27719.0> [views:debug,2014-08-19T16:49:41.778,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1007. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.778,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1007,active,0} [ns_server:debug,2014-08-19T16:49:41.864,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,855692}, tap_estimate, {replica_building,"default",747,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19780.0>, <<"replication_building_747_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:41.882,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,873836}, tap_estimate, {replica_building,"default",747,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19659.0>, <<"replication_building_747_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:41.883,ns_1@10.242.238.88:<0.27720.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.19659.0>}, {'ns_1@10.242.238.91',<18126.19780.0>}]) [rebalance:info,2014-08-19T16:49:41.883,ns_1@10.242.238.88:<0.27711.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:41.884,ns_1@10.242.238.88:<0.27711.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 747 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:41.884,ns_1@10.242.238.88:<0.27711.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:41.885,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:41.889,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] 
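
Interleaved with the moves, capi_set_view_manager keeps logging its full "Usable vbuckets" list, and in this stretch each successive snapshot picks up the vbucket whose _local/vbuuid document was just written (1017, 1015, 1013, 1011, 1009, 1007, 1005 so far). Diffing consecutive snapshots makes that progression visible without scanning the long lists by eye. A rough sketch follows, under the same assumption as above that the excerpt is saved locally as ns_server.debug.log; it depends only on the "Usable vbuckets: [...]" message shape and tolerates the line wrapping in this capture.

```python
#!/usr/bin/env python3
"""Diff consecutive 'Usable vbuckets' snapshots from capi_set_view_manager.

Rough sketch; 'ns_server.debug.log' is an assumed file name and the output
format is illustrative only.
"""
import re

# The bracketed list may span wrapped lines, so allow digits, commas, whitespace.
SNAPSHOT = re.compile(r"Usable vbuckets:\s*\[([\d,\s]+)\]")

def snapshots(text):
    """Yield each usable-vbucket set, in the order it was logged."""
    for m in SNAPSHOT.finditer(text):
        yield {int(v) for v in m.group(1).split(",") if v.strip()}

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:          # assumed file name
        prev = None
        for i, snap in enumerate(snapshots(f.read())):
            if prev is not None:
                added, removed = sorted(snap - prev), sorted(prev - snap)
                print(f"snapshot {i}: {len(snap)} usable, added {added}, removed {removed}")
            prev = snap
```

Against the snapshots in this excerpt, the diff should show exactly one vbucket added each time and none removed, confirming that the set-view manager is tracking the newly activated vbuckets as the rebalance proceeds.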
[rebalance:debug,2014-08-19T16:49:41.889,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.27746.0>) [ns_server:debug,2014-08-19T16:49:41.890,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 491) [ns_server:debug,2014-08-19T16:49:41.890,ns_1@10.242.238.88:<0.27747.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:41.890,ns_1@10.242.238.88:<0.27747.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:41.890,ns_1@10.242.238.88:<0.27746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 491 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:41.890,ns_1@10.242.238.88:<0.27752.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 491 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:41.890,ns_1@10.242.238.88:<0.27753.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 491 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:41.894,ns_1@10.242.238.88:<0.27754.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 491 into 'ns_1@10.242.238.91' is <18126.19799.0> [ns_server:debug,2014-08-19T16:49:41.896,ns_1@10.242.238.88:<0.27754.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 491 into 'ns_1@10.242.238.89' is <18124.25451.0> [rebalance:debug,2014-08-19T16:49:41.896,ns_1@10.242.238.88:<0.27746.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 491 is <0.27754.0> [ns_server:debug,2014-08-19T16:49:41.953,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1005. Nacking mccouch update. [views:debug,2014-08-19T16:49:41.953,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1005. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:41.953,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1005,active,0} [ns_server:debug,2014-08-19T16:49:41.954,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,680,616,552, 250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888, 824,458,394,330,266,1016,756,692,628,564,198,134,990,926,862,798,496,432,368, 304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576, 512,210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184, 120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456, 392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366,302,728, 664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144, 1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974,910, 846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934, 870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780,478,414, 350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,686, 558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502, 374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,684,556, 190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372, 734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,682,554,188, 916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732, 604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318] [ns_server:debug,2014-08-19T16:49:41.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,980134}, tap_estimate, {replica_building,"default",491,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19799.0>, <<"replication_building_491_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:42.007,ns_1@10.242.238.88:<0.27755.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25451.0>}, {'ns_1@10.242.238.91',<18126.19799.0>}]) [rebalance:info,2014-08-19T16:49:42.007,ns_1@10.242.238.88:<0.27746.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:42.008,ns_1@10.242.238.88:<0.27746.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 491 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:42.008,ns_1@10.242.238.88:<0.27746.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:42.009,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{491, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:42.012,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452581,998332}, tap_estimate, {replica_building,"default",491,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25451.0>, <<"replication_building_491_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:42.013,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:42.014,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.27767.0>) [ns_server:debug,2014-08-19T16:49:42.014,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1002) [ns_server:debug,2014-08-19T16:49:42.014,ns_1@10.242.238.88:<0.27768.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:42.014,ns_1@10.242.238.88:<0.27768.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:42.014,ns_1@10.242.238.88:<0.27767.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1002 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:42.015,ns_1@10.242.238.88:<0.27773.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1002 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:42.015,ns_1@10.242.238.88:<0.27774.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1002 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:42.018,ns_1@10.242.238.88:<0.27775.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1002 into 'ns_1@10.242.238.90' is <18125.19679.0> [ns_server:debug,2014-08-19T16:49:42.020,ns_1@10.242.238.88:<0.27775.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1002 into 'ns_1@10.242.238.91' is <18126.19818.0> [rebalance:debug,2014-08-19T16:49:42.020,ns_1@10.242.238.88:<0.27767.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1002 is <0.27775.0> [views:debug,2014-08-19T16:49:42.037,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1005. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.037,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1005,active,0} [ns_server:debug,2014-08-19T16:49:42.113,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452582,103985}, tap_estimate, {replica_building,"default",1002,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.19679.0>, <<"replication_building_1002_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:42.130,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452582,121939}, tap_estimate, {replica_building,"default",1002,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.19818.0>, <<"replication_building_1002_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:42.132,ns_1@10.242.238.88:<0.27776.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.19818.0>}, {'ns_1@10.242.238.90',<18125.19679.0>}]) [rebalance:info,2014-08-19T16:49:42.132,ns_1@10.242.238.88:<0.27767.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:42.133,ns_1@10.242.238.88:<0.27767.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1002 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:42.133,ns_1@10.242.238.88:<0.27767.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:42.134,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:42.135,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:49:42.181,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1003. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.181,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1003. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.181,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1003,active,0} [ns_server:debug,2014-08-19T16:49:42.182,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,680,616,552, 250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888, 824,458,394,330,266,1016,756,692,628,564,198,134,1003,990,926,862,798,496, 432,368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704, 640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550, 248,184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886, 822,456,392,328,264,1014,754,690,626,562,196,132,988,924,860,796,494,430,366, 302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574, 208,144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118, 974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140, 996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450, 322,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994, 866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320, 682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864, 498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318] [views:debug,2014-08-19T16:49:42.214,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1003. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1003,active,0} [ns_server:debug,2014-08-19T16:49:42.291,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1001. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.291,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1001. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.291,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1001,active,0} [ns_server:debug,2014-08-19T16:49:42.292,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,680,616,552, 250,186,122,978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888, 824,458,394,330,266,1016,756,692,628,564,198,134,1003,990,926,862,798,496, 432,368,304,730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704, 640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550, 248,184,120,976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886, 822,456,392,328,264,1014,754,690,626,562,196,132,1001,988,924,860,796,494, 430,366,302,728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702, 638,574,208,144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246, 182,118,974,910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820, 454,390,326,262,1012,752,688,624,560,194,130,986,922,858,794,492,428,364,300, 726,662,598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206, 142,1011,998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908, 844,780,478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324, 260,1010,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634, 140,996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816, 450,322,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138, 994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992, 864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318] [views:debug,2014-08-19T16:49:42.325,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1001. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.325,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1001,active,0} [ns_server:debug,2014-08-19T16:49:42.400,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 999. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.400,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/999. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.400,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",999,active,0} [ns_server:debug,2014-08-19T16:49:42.401,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394, 330,266,1016,756,692,628,564,198,134,1003,990,926,862,798,496,432,368,304, 730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512, 210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,999,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140, 996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450, 322,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994, 866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320, 682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864, 498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,680, 552,186] [views:debug,2014-08-19T16:49:42.434,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/999. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.434,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",999,active,0} [ns_server:debug,2014-08-19T16:49:42.509,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 997. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.509,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/997. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.509,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",997,active,0} [ns_server:debug,2014-08-19T16:49:42.511,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394, 330,266,1016,756,692,628,564,198,134,1003,990,926,862,798,496,432,368,304, 730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512, 210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,999,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634, 140,996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816, 450,322,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138, 994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992, 864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318, 680,552,186] [views:debug,2014-08-19T16:49:42.543,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/997. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.543,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",997,active,0} [ns_server:debug,2014-08-19T16:49:42.718,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 995. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.719,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/995. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.719,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",995,active,0} [ns_server:debug,2014-08-19T16:49:42.720,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394, 330,266,1016,756,692,628,564,198,134,1003,990,926,862,798,496,432,368,304, 730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512, 210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,999,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634, 140,996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816, 450,322,995,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136, 992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446, 318,680,552,186] [views:debug,2014-08-19T16:49:42.771,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/995. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.771,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",995,active,0} [ns_server:debug,2014-08-19T16:49:42.947,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 993. Nacking mccouch update. [views:debug,2014-08-19T16:49:42.947,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/993. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:42.947,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",993,active,0} [ns_server:debug,2014-08-19T16:49:42.948,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394, 330,266,1016,756,692,628,564,198,134,1003,990,926,862,798,496,432,368,304, 730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512, 210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,999,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634, 140,996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816, 450,322,995,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630, 136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812, 446,318,680,552,186] [ns_server:debug,2014-08-19T16:49:42.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1002_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1002_'ns_1@10.242.238.90'">>}]}, {move_state,491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_491_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_491_'ns_1@10.242.238.91'">>}]}, {move_state,747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_747_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_747_'ns_1@10.242.238.91'">>}]}, {move_state,1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_1003_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1003_'ns_1@10.242.238.90'">>}]}, {move_state,492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_492_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_492_'ns_1@10.242.238.91'">>}]}, {move_state,748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_748_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_748_'ns_1@10.242.238.91'">>}]}, {move_state,1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1004_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1004_'ns_1@10.242.238.90'">>}]}, {move_state,493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_493_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_493_'ns_1@10.242.238.91'">>}]}, {move_state,749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_749_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_749_'ns_1@10.242.238.91'">>}]}, {move_state,1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1005_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1005_'ns_1@10.242.238.90'">>}]}, {move_state,494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_494_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_494_'ns_1@10.242.238.91'">>}]}, {move_state,750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_750_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_750_'ns_1@10.242.238.91'">>}]}, {move_state,1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1006_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1006_'ns_1@10.242.238.90'">>}]}, {move_state,495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_495_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_495_'ns_1@10.242.238.91'">>}]}, {move_state,751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_751_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_751_'ns_1@10.242.238.91'">>}]}, 
{move_state,1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1007_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1007_'ns_1@10.242.238.90'">>}]}, {move_state,496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_496_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_496_'ns_1@10.242.238.91'">>}]}, {move_state,752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_752_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_752_'ns_1@10.242.238.91'">>}]}, {move_state,1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1008_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1008_'ns_1@10.242.238.90'">>}]}, {move_state,497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_497_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_497_'ns_1@10.242.238.91'">>}]}, {move_state,753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_753_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_753_'ns_1@10.242.238.91'">>}]}, {move_state,1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1009_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1009_'ns_1@10.242.238.90'">>}]}, {move_state,498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_498_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_498_'ns_1@10.242.238.91'">>}]}, {move_state,754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_754_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_754_'ns_1@10.242.238.91'">>}]}, {move_state,1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1010_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1010_'ns_1@10.242.238.90'">>}]}, {move_state,499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_499_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_499_'ns_1@10.242.238.91'">>}]}, {move_state,755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_755_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_755_'ns_1@10.242.238.91'">>}]}, {move_state,1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1011_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1011_'ns_1@10.242.238.90'">>}]}, {move_state,500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_500_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_500_'ns_1@10.242.238.91'">>}]}, {move_state,756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_756_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_756_'ns_1@10.242.238.91'">>}]}, {move_state,1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1012_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1012_'ns_1@10.242.238.90'">>}]}, {move_state,501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_501_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_501_'ns_1@10.242.238.91'">>}]}, {move_state,757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_757_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_757_'ns_1@10.242.238.91'">>}]}, {move_state,1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1013_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1013_'ns_1@10.242.238.90'">>}]}, {move_state,502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_502_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_502_'ns_1@10.242.238.91'">>}]}, {move_state,758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_758_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_758_'ns_1@10.242.238.91'">>}]}, {move_state,1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1014_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1014_'ns_1@10.242.238.90'">>}]}, {move_state,503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_503_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_503_'ns_1@10.242.238.91'">>}]}, 
{move_state,759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_759_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_759_'ns_1@10.242.238.91'">>}]}, {move_state,1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1015_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1015_'ns_1@10.242.238.90'">>}]}, {move_state,504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_504_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_504_'ns_1@10.242.238.91'">>}]}, {move_state,760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_760_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_760_'ns_1@10.242.238.91'">>}]}, {move_state,1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1016_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1016_'ns_1@10.242.238.90'">>}]}, {move_state,505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_505_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_505_'ns_1@10.242.238.91'">>}]}, {move_state,761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_761_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_761_'ns_1@10.242.238.91'">>}]}, {move_state,1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1017_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1017_'ns_1@10.242.238.90'">>}]}, {move_state,506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_506_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_506_'ns_1@10.242.238.91'">>}]}, {move_state,762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_762_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_762_'ns_1@10.242.238.91'">>}]}, {move_state,1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1018_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1018_'ns_1@10.242.238.90'">>}]}, {move_state,507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_507_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_507_'ns_1@10.242.238.91'">>}]}, {move_state,763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_763_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_763_'ns_1@10.242.238.91'">>}]}, {move_state,1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1019_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1019_'ns_1@10.242.238.90'">>}]}, {move_state,508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_508_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_508_'ns_1@10.242.238.91'">>}]}, {move_state,764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_764_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_764_'ns_1@10.242.238.91'">>}]}, {move_state,1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1020_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1020_'ns_1@10.242.238.90'">>}]}, {move_state,509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_509_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_509_'ns_1@10.242.238.91'">>}]}, {move_state,765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_765_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_765_'ns_1@10.242.238.91'">>}]}, {move_state,1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1021_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1021_'ns_1@10.242.238.90'">>}]}, {move_state,510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_510_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_510_'ns_1@10.242.238.91'">>}]}, {move_state,766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_766_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_766_'ns_1@10.242.238.91'">>}]}, {move_state,1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1022_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1022_'ns_1@10.242.238.90'">>}]}, 
{move_state,511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_511_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_511_'ns_1@10.242.238.91'">>}]}, {move_state,767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_767_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_767_'ns_1@10.242.238.91'">>}]}, {move_state,1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1023_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1023_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:49:42.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1002, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 491, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 747, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1003, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 492, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 748, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1004, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 493, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 749, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1005, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 494, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 750, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1006, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 495, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:42.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 751, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1007, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 496, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 752, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1008, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 497, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 753, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1009, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 498, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 754, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1010, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 499, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 755, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1011, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 500, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 756, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1012, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 501, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 757, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:42.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1013, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 502, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 758, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1014, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 503, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 759, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1015, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 504, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 760, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1016, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 505, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 761, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1017, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 506, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 762, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1018, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 507, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 763, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1019, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:49:42.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 508, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 764, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1020, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 509, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 765, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1021, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 510, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 766, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1022, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:42.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 511, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 767, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:42.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1023, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [views:debug,2014-08-19T16:49:43.031,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/993. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.031,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",993,active,0} [ns_server:debug,2014-08-19T16:49:43.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 991. Nacking mccouch update. [views:debug,2014-08-19T16:49:43.215,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/991. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",991,active,0} [ns_server:debug,2014-08-19T16:49:43.216,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,914,850,786,484,420,356,292,718,654,590,526,224,160,952,888,824,458,394, 330,266,1016,756,692,628,564,198,134,1003,990,926,862,798,496,432,368,304, 730,666,602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512, 210,146,1015,938,874,810,508,444,380,316,1002,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 728,664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,999,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634, 140,996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816, 450,322,995,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630, 136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812, 446,318,991,680,552,186] [views:debug,2014-08-19T16:49:43.299,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/991. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.299,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",991,active,0} [ns_server:debug,2014-08-19T16:49:43.440,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 989. Nacking mccouch update. [views:debug,2014-08-19T16:49:43.440,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/989. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.440,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",989,active,0} [ns_server:debug,2014-08-19T16:49:43.442,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,850,484,356,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,730,666,602,538,236, 172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874, 810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482, 418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690, 626,562,196,132,1001,988,924,860,796,494,430,366,302,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506, 442,378,314,1000,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712,648,584, 520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792, 426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736,608,242, 114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556,190,918,790, 424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606,240, 112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554,188,916,788, 422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604,238, 110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680,552,186,914,786, 420,292] [views:debug,2014-08-19T16:49:43.508,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/989. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.508,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",989,active,0} [ns_server:debug,2014-08-19T16:49:43.633,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 987. Nacking mccouch update. [views:debug,2014-08-19T16:49:43.633,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/987. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.633,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",987,active,0} [ns_server:debug,2014-08-19T16:49:43.634,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,850,484,356,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,730,666,602,538,236, 172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874, 810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482, 418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690, 626,562,196,132,1001,988,924,860,796,494,430,366,302,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506, 442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352, 288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736,608, 242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556,190,918, 790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734,606, 240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554,188,916, 788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732,604, 238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680,552,186,914, 786,420,292] [views:debug,2014-08-19T16:49:43.700,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/987. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.700,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",987,active,0} [ns_server:debug,2014-08-19T16:49:43.794,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 985. Nacking mccouch update. [views:debug,2014-08-19T16:49:43.794,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/985. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.794,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",985,active,0} [ns_server:debug,2014-08-19T16:49:43.795,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748,620, 254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930,802, 436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618,252, 124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800,434, 306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250,122, 978,850,484,356,718,654,590,526,224,160,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,730,666,602,538,236, 172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874, 810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482, 418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690, 626,562,196,132,1001,988,924,860,796,494,430,366,302,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506, 442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352, 288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556,190, 918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734, 606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554,188, 916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732, 604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680,552,186, 914,786,420,292] [views:debug,2014-08-19T16:49:43.828,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/985. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.828,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",985,active,0} [ns_server:debug,2014-08-19T16:49:43.903,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 983. Nacking mccouch update. [views:debug,2014-08-19T16:49:43.903,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/983. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",983,active,0} [ns_server:debug,2014-08-19T16:49:43.904,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746,618, 252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928,800, 434,306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616,250, 122,978,850,484,356,718,654,590,526,224,160,952,888,824,458,394,330,266,1016, 756,692,628,564,198,134,1003,990,926,862,798,496,432,368,304,730,666,602,538, 236,172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938, 874,810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784, 482,418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,728,664,600,536,234, 170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808, 506,442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416, 352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688, 624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556, 190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372, 734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554, 188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370, 732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680,552, 186,914,786,420,292] [views:debug,2014-08-19T16:49:43.937,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/983. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:43.937,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",983,active,0} [ns_server:debug,2014-08-19T16:49:44.037,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 981. Nacking mccouch update. [views:debug,2014-08-19T16:49:44.037,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/981. 
Updated state: active (0) [rebalance:info,2014-08-19T16:49:44.038,ns_1@10.242.238.88:<0.26289.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 506 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:49:44.038,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",981,active,0} [rebalance:info,2014-08-19T16:49:44.039,ns_1@10.242.238.88:<0.28025.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 511) [rebalance:info,2014-08-19T16:49:44.040,ns_1@10.242.238.88:<0.25816.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:49:44.041,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 506 state to active [ns_server:debug,2014-08-19T16:49:44.042,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744,616, 250,122,978,850,484,356,718,654,590,526,224,160,952,888,824,458,394,330,266, 1016,756,692,628,564,198,134,1003,990,926,862,798,496,432,368,304,730,666, 602,538,236,172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146, 1015,938,874,810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976, 912,848,784,482,418,354,290,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302,728, 664,600,536,234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144, 1013,936,872,808,506,442,378,314,1000,987,740,676,612,548,246,182,118,974, 910,846,782,480,416,352,288,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,999,752,688,624,560,194,130,986,922,858,794,492,428,364,300,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,985,738,674,610,546,244,180,116,972,908,844, 780,478,414,350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634, 140,996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816, 450,322,995,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630, 136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812, 446,318,991,680,552,186,914,786,420,292] [ns_server:info,2014-08-19T16:49:44.044,ns_1@10.242.238.88:<0.25824.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_511_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:44.044,ns_1@10.242.238.88:<0.26289.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 506 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:44.044,ns_1@10.242.238.88:<0.25816.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:44.044,ns_1@10.242.238.88:<0.26289.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:44.047,ns_1@10.242.238.88:<0.25816.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 511 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28032.0> [ns_server:info,2014-08-19T16:49:44.048,ns_1@10.242.238.88:<0.28032.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 511 to state replica [views:debug,2014-08-19T16:49:44.074,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/981. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.074,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",981,active,0} [ns_server:debug,2014-08-19T16:49:44.089,ns_1@10.242.238.88:<0.28032.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_511 [rebalance:info,2014-08-19T16:49:44.091,ns_1@10.242.238.88:<0.28032.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[511]}, {checkpoints,[{511,1}]}, {name,<<"rebalance_511">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[511]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"511"}]} [rebalance:debug,2014-08-19T16:49:44.091,ns_1@10.242.238.88:<0.28032.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28033.0> [rebalance:info,2014-08-19T16:49:44.092,ns_1@10.242.238.88:<0.28032.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:44.094,ns_1@10.242.238.88:<0.28032.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:44.094,ns_1@10.242.238.88:<0.28032.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:44.095,ns_1@10.242.238.88:<0.25816.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 511 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:44.097,ns_1@10.242.238.88:<0.25824.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:44.101,ns_1@10.242.238.88:<0.25824.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_511_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:44.102,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 511 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:44.102,ns_1@10.242.238.88:<0.28037.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 511 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:44.109,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:44.111,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:44.111,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:44.111,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{511, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:44.111,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:44.123,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 511 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:44.124,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 511) [ns_server:debug,2014-08-19T16:49:44.125,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:49:44.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 979. Nacking mccouch update. [views:debug,2014-08-19T16:49:44.146,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/979. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",979,active,0} [ns_server:debug,2014-08-19T16:49:44.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,730,666,602,538,236, 172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874, 810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482, 418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690, 626,562,196,132,1001,988,924,860,796,494,430,366,302,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506, 442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352, 288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556,190, 918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372,734, 606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554,188, 916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370,732, 604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680,552,186, 914,786,420,292,654,526,160] [views:debug,2014-08-19T16:49:44.180,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/979. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.180,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",979,active,0} [ns_server:debug,2014-08-19T16:49:44.255,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 977. Nacking mccouch update. [views:debug,2014-08-19T16:49:44.256,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/977. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.256,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",977,active,0} [ns_server:debug,2014-08-19T16:49:44.257,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538, 236,172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938, 874,810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784, 482,418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,728,664,600,536,234, 170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808, 506,442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416, 352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688, 624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556, 190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500,372, 734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554, 188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370, 732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680,552, 186,914,786,420,292,654,526,160] [views:debug,2014-08-19T16:49:44.325,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/977. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.326,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",977,active,0} [ns_server:debug,2014-08-19T16:49:44.467,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 975. Nacking mccouch update. [views:debug,2014-08-19T16:49:44.468,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/975. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.468,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",975,active,0} [ns_server:debug,2014-08-19T16:49:44.469,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538, 236,172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938, 874,810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784, 482,418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536, 234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480, 416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,726,662,598,534,232,168, 960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868,502, 374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866,500, 372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682, 554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498, 370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680, 552,186,914,786,420,292,654,526,160] [views:debug,2014-08-19T16:49:44.543,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/975. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.543,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",975,active,0} [ns_server:debug,2014-08-19T16:49:44.693,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 973. Nacking mccouch update. [views:debug,2014-08-19T16:49:44.693,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/973. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.693,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",973,active,0} [ns_server:debug,2014-08-19T16:49:44.695,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538, 236,172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938, 874,810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784, 482,418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536, 234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480, 416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870, 806,504,440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414, 350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997, 686,558,192,920,792,426,298,660,532,166,894,400,272,1022,762,634,140,996,868, 502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994,866, 500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993, 682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864, 498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991, 680,552,186,914,786,420,292,654,526,160] [views:debug,2014-08-19T16:49:44.769,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/973. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.769,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",973,active,0} [ns_server:debug,2014-08-19T16:49:44.927,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 971. Nacking mccouch update. [views:debug,2014-08-19T16:49:44.927,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/971. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.928,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",971,active,0} [ns_server:debug,2014-08-19T16:49:44.929,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,888,824,458,394,330,266,1016,756, 692,628,564,198,134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538, 236,172,108,964,900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938, 874,810,508,444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784, 482,418,354,290,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536, 234,170,962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480, 416,352,288,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870, 806,504,440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414, 350,286,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997, 686,558,192,920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,658,530,164,892,398,270,1020,760,632,138,994, 866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320, 993,682,554,188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992, 864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318, 991,680,552,186,914,786,420,292,654,526,160] [views:debug,2014-08-19T16:49:44.995,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/971. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:44.995,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",971,active,0} [ns_server:debug,2014-08-19T16:49:45.153,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 969. Nacking mccouch update. [views:debug,2014-08-19T16:49:45.153,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/969. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.154,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",969,active,0} [ns_server:debug,2014-08-19T16:49:45.154,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,824,458,330,756,692,628,564,198, 134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964, 900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444, 380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290, 716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196, 132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898, 834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556,190, 918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866,500,372, 734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554, 188,916,788,422,294,656,528,162,890,396,268,1018,758,630,136,992,864,498,370, 732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680,552, 186,914,786,420,292,654,526,160,888,394,266,1016] [rebalance:info,2014-08-19T16:49:45.201,ns_1@10.242.238.88:<0.28137.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 767) [rebalance:info,2014-08-19T16:49:45.201,ns_1@10.242.238.88:<0.26107.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 508 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:45.201,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 508 state to active [rebalance:info,2014-08-19T16:49:45.202,ns_1@10.242.238.88:<0.25795.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:45.203,ns_1@10.242.238.88:<0.26107.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 508 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:45.203,ns_1@10.242.238.88:<0.26107.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:45.205,ns_1@10.242.238.88:<0.25803.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_767_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:45.206,ns_1@10.242.238.88:<0.25795.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:45.209,ns_1@10.242.238.88:<0.25795.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 767 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.28144.0> [ns_server:info,2014-08-19T16:49:45.210,ns_1@10.242.238.88:<0.28144.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 767 to state replica [views:debug,2014-08-19T16:49:45.229,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/969. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.229,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",969,active,0} [ns_server:debug,2014-08-19T16:49:45.253,ns_1@10.242.238.88:<0.28144.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_767 [rebalance:info,2014-08-19T16:49:45.255,ns_1@10.242.238.88:<0.28144.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[767]}, {checkpoints,[{767,1}]}, {name,<<"rebalance_767">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[767]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"767"}]} [rebalance:debug,2014-08-19T16:49:45.255,ns_1@10.242.238.88:<0.28144.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28145.0> [rebalance:info,2014-08-19T16:49:45.256,ns_1@10.242.238.88:<0.28144.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:45.258,ns_1@10.242.238.88:<0.28144.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:45.258,ns_1@10.242.238.88:<0.28144.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:45.259,ns_1@10.242.238.88:<0.25795.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 767 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:45.261,ns_1@10.242.238.88:<0.25803.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:45.267,ns_1@10.242.238.88:<0.25803.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_767_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:45.267,ns_1@10.242.238.88:<0.25911.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 510 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:45.267,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 767 state change 
[{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:45.267,ns_1@10.242.238.88:<0.28149.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 767 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:49:45.267,ns_1@10.242.238.88:<0.28150.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1023) [ns_server:info,2014-08-19T16:49:45.267,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 510 state to active [rebalance:info,2014-08-19T16:49:45.268,ns_1@10.242.238.88:<0.25911.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 510 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:45.269,ns_1@10.242.238.88:<0.25911.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:45.270,ns_1@10.242.238.88:<0.25760.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:49:45.272,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.274,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:45.274,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.274,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:45.274,ns_1@10.242.238.88:<0.25768.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1023_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:45.274,ns_1@10.242.238.88:<0.25760.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:45.274,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{767, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:45.282,ns_1@10.242.238.88:<0.25760.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1023 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.28165.0> [ns_server:info,2014-08-19T16:49:45.283,ns_1@10.242.238.88:<0.28165.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1023 to state replica [rebalance:info,2014-08-19T16:49:45.287,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 767 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:45.288,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 767) [ns_server:debug,2014-08-19T16:49:45.289,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:49:45.322,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 967. Nacking mccouch update. [views:debug,2014-08-19T16:49:45.323,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/967. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.323,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",967,active,0} [ns_server:debug,2014-08-19T16:49:45.323,ns_1@10.242.238.88:<0.28165.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1023 [rebalance:info,2014-08-19T16:49:45.325,ns_1@10.242.238.88:<0.28165.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1023]}, {checkpoints,[{1023,1}]}, {name,<<"rebalance_1023">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1023]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1023"}]} [rebalance:info,2014-08-19T16:49:45.325,ns_1@10.242.238.88:<0.26447.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 760 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:45.326,ns_1@10.242.238.88:<0.27746.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 491 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:49:45.325,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,824,458,330,756,692,628,564,198, 134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964, 900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444, 380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290, 716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196, 132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898, 834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 
608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556,190, 918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866,500,372, 734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554, 188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992,864,498, 370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680, 552,186,914,786,420,292,654,526,160,888,394,266,1016] [rebalance:debug,2014-08-19T16:49:45.326,ns_1@10.242.238.88:<0.28165.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28181.0> [ns_server:info,2014-08-19T16:49:45.326,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 760 state to active [rebalance:info,2014-08-19T16:49:45.328,ns_1@10.242.238.88:<0.26447.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 760 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:45.328,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 491 state to active [rebalance:info,2014-08-19T16:49:45.328,ns_1@10.242.238.88:<0.28165.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:49:45.330,ns_1@10.242.238.88:<0.27746.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 491 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:45.330,ns_1@10.242.238.88:<0.26447.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:debug,2014-08-19T16:49:45.333,ns_1@10.242.238.88:<0.28165.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:45.333,ns_1@10.242.238.88:<0.28165.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:45.333,ns_1@10.242.238.88:<0.27746.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:45.334,ns_1@10.242.238.88:<0.25760.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1023 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:45.335,ns_1@10.242.238.88:<0.25768.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:45.339,ns_1@10.242.238.88:<0.25768.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1023_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:45.339,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1023 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:45.339,ns_1@10.242.238.88:<0.28193.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1023 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:45.344,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.345,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:45.346,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.346,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.345,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1023, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:49:45.362,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1023 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:45.362,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1023) [ns_server:debug,2014-08-19T16:49:45.363,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:49:45.374,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/967. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.374,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",967,active,0} [ns_server:debug,2014-08-19T16:49:45.448,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 965. Nacking mccouch update. [views:debug,2014-08-19T16:49:45.449,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/965. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.449,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",965,active,0} [ns_server:debug,2014-08-19T16:49:45.450,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,824,458,330,756,692,628,564,198, 134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964, 900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444, 380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290, 716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196, 132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898, 834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502,374,736, 608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556,190, 918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866,500,372, 734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682,554, 188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992,864,498, 370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991,680, 552,186,914,786,420,292,965,654,526,160,888,394,266,1016] [views:debug,2014-08-19T16:49:45.482,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/965. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.482,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",965,active,0} [rebalance:info,2014-08-19T16:49:45.542,ns_1@10.242.238.88:<0.27547.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 493 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:45.542,ns_1@10.242.238.88:<0.26254.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 762 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:45.543,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 493 state to active [rebalance:info,2014-08-19T16:49:45.544,ns_1@10.242.238.88:<0.27547.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 493 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:45.544,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 762 state to active [rebalance:info,2014-08-19T16:49:45.545,ns_1@10.242.238.88:<0.26254.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 762 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:45.545,ns_1@10.242.238.88:<0.27547.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:45.546,ns_1@10.242.238.88:<0.26254.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:45.626,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 963. Nacking mccouch update. [views:debug,2014-08-19T16:49:45.626,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/963. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.626,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",963,active,0} [ns_server:debug,2014-08-19T16:49:45.627,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,824,458,330,756,692,628,564,198, 134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964, 900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444, 380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290, 963,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962, 898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866,500, 372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682, 554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992,864, 498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991, 680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016] [rebalance:info,2014-08-19T16:49:45.693,ns_1@10.242.238.88:<0.26085.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 764 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:45.693,ns_1@10.242.238.88:<0.27351.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 495 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:45.693,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 764 state to active [rebalance:info,2014-08-19T16:49:45.694,ns_1@10.242.238.88:<0.26085.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 764 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:45.694,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 495 state to active [rebalance:info,2014-08-19T16:49:45.695,ns_1@10.242.238.88:<0.27351.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 495 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:45.696,ns_1@10.242.238.88:<0.26085.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:45.696,ns_1@10.242.238.88:<0.27351.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:45.710,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/963. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.710,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",963,active,0} [rebalance:info,2014-08-19T16:49:45.818,ns_1@10.242.238.88:<0.27169.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 497 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:45.818,ns_1@10.242.238.88:<0.28261.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 766) [ns_server:info,2014-08-19T16:49:45.819,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 497 state to active [rebalance:info,2014-08-19T16:49:45.820,ns_1@10.242.238.88:<0.25872.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:45.820,ns_1@10.242.238.88:<0.27169.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 497 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:45.821,ns_1@10.242.238.88:<0.27169.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:45.823,ns_1@10.242.238.88:<0.25880.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_766_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:45.823,ns_1@10.242.238.88:<0.25872.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:45.825,ns_1@10.242.238.88:<0.25872.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 766 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.28268.0> [ns_server:info,2014-08-19T16:49:45.826,ns_1@10.242.238.88:<0.28268.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 766 to state replica [ns_server:debug,2014-08-19T16:49:45.869,ns_1@10.242.238.88:<0.28268.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_766 [rebalance:info,2014-08-19T16:49:45.870,ns_1@10.242.238.88:<0.28268.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[766]}, {checkpoints,[{766,1}]}, {name,<<"rebalance_766">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[766]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"766"}]} [rebalance:debug,2014-08-19T16:49:45.871,ns_1@10.242.238.88:<0.28268.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28269.0> [rebalance:info,2014-08-19T16:49:45.872,ns_1@10.242.238.88:<0.28268.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:45.874,ns_1@10.242.238.88:<0.28268.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:info,2014-08-19T16:49:45.874,ns_1@10.242.238.88:<0.28268.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:45.875,ns_1@10.242.238.88:<0.25872.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 766 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:45.876,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 961. Nacking mccouch update. [rebalance:debug,2014-08-19T16:49:45.876,ns_1@10.242.238.88:<0.25880.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [views:debug,2014-08-19T16:49:45.876,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/961. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.877,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",961,active,0} [ns_server:debug,2014-08-19T16:49:45.878,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,824,458,330,756,692,628,564,198, 134,1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964, 900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444, 380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290, 963,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962, 898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288, 961,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502, 374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866, 500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992, 864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318, 991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016] [ns_server:info,2014-08-19T16:49:45.881,ns_1@10.242.238.88:<0.25880.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_766_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:45.881,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing 
bulk vbucket 766 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:45.882,ns_1@10.242.238.88:<0.28273.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 766 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:49:45.893,ns_1@10.242.238.88:<0.26412.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1016 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:45.893,ns_1@10.242.238.88:<0.26968.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 499 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:45.893,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1016 state to active [ns_server:debug,2014-08-19T16:49:45.895,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:45.895,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:45.895,ns_1@10.242.238.88:<0.26412.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1016 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:45.895,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 499 state to active [ns_server:debug,2014-08-19T16:49:45.895,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:45.895,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{766, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:45.896,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:45.897,ns_1@10.242.238.88:<0.26968.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 499 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:45.897,ns_1@10.242.238.88:<0.26412.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:45.897,ns_1@10.242.238.88:<0.26968.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:45.904,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 766 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:45.905,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 766) [ns_server:debug,2014-08-19T16:49:45.906,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:49:45.960,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/961. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:45.960,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",961,active,0} [rebalance:info,2014-08-19T16:49:45.994,ns_1@10.242.238.88:<0.26799.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 501 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:45.994,ns_1@10.242.238.88:<0.26233.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1018 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:45.995,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 501 state to active [rebalance:info,2014-08-19T16:49:45.996,ns_1@10.242.238.88:<0.26799.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 501 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:45.996,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1018 state to active [rebalance:info,2014-08-19T16:49:45.997,ns_1@10.242.238.88:<0.26233.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1018 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:45.997,ns_1@10.242.238.88:<0.26799.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:45.998,ns_1@10.242.238.88:<0.26233.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:46.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 959. Nacking mccouch update. [views:debug,2014-08-19T16:49:46.120,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/959. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",959,active,0} [ns_server:debug,2014-08-19T16:49:46.121,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,646,518,152,880,386,258,1008,748, 620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007,930, 802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006,746, 618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005,928, 800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004,744, 616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564,198,1003,990, 926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964,900,836,772, 470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444,380,316, 1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716, 652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866,500, 372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682, 554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992,864, 498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991, 680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756,628,134] [rebalance:info,2014-08-19T16:49:46.131,ns_1@10.242.238.88:<0.26572.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 503 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.131,ns_1@10.242.238.88:<0.26042.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1020 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:46.132,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 503 state to active [rebalance:info,2014-08-19T16:49:46.132,ns_1@10.242.238.88:<0.26572.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 503 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.133,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1020 state to active [rebalance:info,2014-08-19T16:49:46.134,ns_1@10.242.238.88:<0.26042.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1020 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:46.134,ns_1@10.242.238.88:<0.26572.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.135,ns_1@10.242.238.88:<0.26042.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:46.171,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/959. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.171,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",959,active,0} [ns_server:debug,2014-08-19T16:49:46.246,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 957. Nacking mccouch update. [views:debug,2014-08-19T16:49:46.246,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/957. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.246,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",957,active,0} [ns_server:debug,2014-08-19T16:49:46.247,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,644,516,150,878,384,256,1006, 746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200,1005, 928,800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382,1004, 744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564,198,1003, 990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964,900,836, 772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444,380,316, 1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716, 652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502,374, 736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866,500, 372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993,682, 554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992,864, 498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318,991, 680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756,628,134] [rebalance:info,2014-08-19T16:49:46.248,ns_1@10.242.238.88:<0.25851.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1022 state change: 
{'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:46.248,ns_1@10.242.238.88:<0.26391.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 505 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:46.249,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1022 state to active [rebalance:info,2014-08-19T16:49:46.250,ns_1@10.242.238.88:<0.25851.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1022 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.250,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 505 state to active [rebalance:info,2014-08-19T16:49:46.251,ns_1@10.242.238.88:<0.26391.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 505 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:46.251,ns_1@10.242.238.88:<0.25851.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.251,ns_1@10.242.238.88:<0.26391.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:46.280,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/957. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",957,active,0} [rebalance:info,2014-08-19T16:49:46.366,ns_1@10.242.238.88:<0.26198.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 507 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.366,ns_1@10.242.238.88:<0.28357.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1022) [ns_server:info,2014-08-19T16:49:46.367,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 507 state to active [rebalance:info,2014-08-19T16:49:46.367,ns_1@10.242.238.88:<0.25851.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:46.368,ns_1@10.242.238.88:<0.26198.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 507 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:46.369,ns_1@10.242.238.88:<0.26198.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:46.370,ns_1@10.242.238.88:<0.25859.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1022_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:46.371,ns_1@10.242.238.88:<0.25851.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:46.374,ns_1@10.242.238.88:<0.25851.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1022 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.28367.0> [ns_server:info,2014-08-19T16:49:46.374,ns_1@10.242.238.88:<0.28367.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1022 to state replica [ns_server:debug,2014-08-19T16:49:46.416,ns_1@10.242.238.88:<0.28367.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1022 [rebalance:info,2014-08-19T16:49:46.418,ns_1@10.242.238.88:<0.28367.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1022]}, {checkpoints,[{1022,1}]}, {name,<<"rebalance_1022">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1022]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1022"}]} [rebalance:debug,2014-08-19T16:49:46.419,ns_1@10.242.238.88:<0.28367.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28373.0> [rebalance:info,2014-08-19T16:49:46.420,ns_1@10.242.238.88:<0.28367.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:46.422,ns_1@10.242.238.88:<0.28367.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:46.422,ns_1@10.242.238.88:<0.28367.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:46.423,ns_1@10.242.238.88:<0.25851.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1022 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:46.425,ns_1@10.242.238.88:<0.25859.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:46.425,ns_1@10.242.238.88:<0.27654.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 492 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.425,ns_1@10.242.238.88:<0.26007.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 509 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:46.426,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 492 state to active [rebalance:info,2014-08-19T16:49:46.427,ns_1@10.242.238.88:<0.27654.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 492 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.427,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 509 state to active 
[rebalance:info,2014-08-19T16:49:46.428,ns_1@10.242.238.88:<0.26007.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 509 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:46.428,ns_1@10.242.238.88:<0.27654.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.429,ns_1@10.242.238.88:<0.26007.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:46.430,ns_1@10.242.238.88:<0.25859.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1022_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:46.431,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1022 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:46.431,ns_1@10.242.238.88:<0.28385.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1022 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:46.440,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 955. Nacking mccouch update. [views:debug,2014-08-19T16:49:46.440,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/955. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.440,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",955,active,0} [ns_server:debug,2014-08-19T16:49:46.441,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,642,514,148,876,510,382, 1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564,198, 1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964,900, 836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444,380, 316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963, 716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196, 132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898, 834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502, 374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684, 
556,190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866, 500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992, 864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318, 991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756,628,134] [ns_server:debug,2014-08-19T16:49:46.445,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:46.446,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1022, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:46.448,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:46.448,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:46.448,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:46.457,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1022 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:46.458,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1022) [ns_server:debug,2014-08-19T16:49:46.459,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:49:46.474,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/955. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",955,active,0} [ns_server:debug,2014-08-19T16:49:46.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 953. Nacking mccouch update. [views:debug,2014-08-19T16:49:46.549,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/953. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",953,active,0} [ns_server:debug,2014-08-19T16:49:46.550,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964, 900,836,772,470,406,342,278,704,640,576,512,210,146,1015,938,874,810,508,444, 380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290, 963,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962, 898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288, 961,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868, 502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994, 866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320, 993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136, 992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446, 318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756,628, 134] [rebalance:info,2014-08-19T16:49:46.566,ns_1@10.242.238.88:<0.27442.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 494 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.567,ns_1@10.242.238.88:<0.27711.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 747 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:46.567,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 494 state to active [rebalance:info,2014-08-19T16:49:46.568,ns_1@10.242.238.88:<0.27442.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 494 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.568,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 747 state to active [rebalance:info,2014-08-19T16:49:46.569,ns_1@10.242.238.88:<0.27711.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 747 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:46.570,ns_1@10.242.238.88:<0.27442.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.570,ns_1@10.242.238.88:<0.27711.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:46.583,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/953. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.583,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",953,active,0} [rebalance:info,2014-08-19T16:49:46.667,ns_1@10.242.238.88:<0.27260.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 496 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.667,ns_1@10.242.238.88:<0.27512.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 749 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:46.668,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 496 state to active [rebalance:info,2014-08-19T16:49:46.669,ns_1@10.242.238.88:<0.27260.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 496 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.669,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 749 state to active [rebalance:info,2014-08-19T16:49:46.670,ns_1@10.242.238.88:<0.27512.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 749 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:46.670,ns_1@10.242.238.88:<0.27260.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.671,ns_1@10.242.238.88:<0.27512.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:46.708,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 951. Nacking mccouch update. [views:debug,2014-08-19T16:49:46.708,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/951. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.708,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",951,active,0} [ns_server:debug,2014-08-19T16:49:46.709,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,990,926,862,798,496,432,368,304,977,730,666,602,538,236,172,108,964, 900,836,772,470,406,342,278,951,704,640,576,512,210,146,1015,938,874,810,508, 444,380,316,1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354, 290,963,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626, 562,196,132,1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,766,702,638,574,208,144,1013,936,872,808,506, 442,378,314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352, 288,961,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688, 624,560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168, 960,896,832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997, 686,558,192,920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138, 994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630, 136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812, 446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756, 628,134] [rebalance:info,2014-08-19T16:49:46.767,ns_1@10.242.238.88:<0.27078.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 498 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.767,ns_1@10.242.238.88:<0.27316.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 751 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:46.768,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 498 state to active [rebalance:info,2014-08-19T16:49:46.769,ns_1@10.242.238.88:<0.27078.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 498 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.769,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 751 state to active [rebalance:info,2014-08-19T16:49:46.770,ns_1@10.242.238.88:<0.27316.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 751 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:46.771,ns_1@10.242.238.88:<0.27078.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.771,ns_1@10.242.238.88:<0.27316.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:46.777,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/951. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",951,active,0} [rebalance:info,2014-08-19T16:49:46.868,ns_1@10.242.238.88:<0.26891.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 500 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.868,ns_1@10.242.238.88:<0.27148.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 753 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:46.868,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 500 state to active [rebalance:info,2014-08-19T16:49:46.869,ns_1@10.242.238.88:<0.26891.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 500 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.870,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 753 state to active [rebalance:info,2014-08-19T16:49:46.870,ns_1@10.242.238.88:<0.27148.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 753 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:46.871,ns_1@10.242.238.88:<0.26891.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.871,ns_1@10.242.238.88:<0.27148.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:46.927,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 949. Nacking mccouch update. [views:debug,2014-08-19T16:49:46.928,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/949. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.928,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",949,active,0} [ns_server:debug,2014-08-19T16:49:46.929,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,730,666,602,538,236,172,108,964,900,836,772,470, 406,342,278,951,704,640,576,512,210,146,1015,938,874,810,508,444,380,316, 1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716, 652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868,502, 374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994,866, 500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320,993, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136,992, 864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446,318, 991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756,628,134, 990,862,496,368] [rebalance:info,2014-08-19T16:49:46.978,ns_1@10.242.238.88:<0.26677.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 502 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:46.978,ns_1@10.242.238.88:<0.26947.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 755 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:46.978,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 502 state to active [rebalance:info,2014-08-19T16:49:46.979,ns_1@10.242.238.88:<0.26677.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 502 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:46.980,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 755 state to active [rebalance:info,2014-08-19T16:49:46.980,ns_1@10.242.238.88:<0.26947.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 755 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:46.981,ns_1@10.242.238.88:<0.26677.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:46.981,ns_1@10.242.238.88:<0.26947.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:46.986,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/949. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:46.987,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",949,active,0} [rebalance:info,2014-08-19T16:49:47.112,ns_1@10.242.238.88:<0.28494.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 504) [rebalance:info,2014-08-19T16:49:47.112,ns_1@10.242.238.88:<0.26778.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 757 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:47.112,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 757 state to active [rebalance:info,2014-08-19T16:49:47.114,ns_1@10.242.238.88:<0.26482.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:47.115,ns_1@10.242.238.88:<0.26778.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 757 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.115,ns_1@10.242.238.88:<0.26778.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:47.117,ns_1@10.242.238.88:<0.26490.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_504_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:47.118,ns_1@10.242.238.88:<0.26482.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:47.120,ns_1@10.242.238.88:<0.26482.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 504 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28501.0> [ns_server:info,2014-08-19T16:49:47.121,ns_1@10.242.238.88:<0.28501.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 504 to state replica [ns_server:debug,2014-08-19T16:49:47.162,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 947. Nacking mccouch update. [views:debug,2014-08-19T16:49:47.162,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/947. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.162,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",947,active,0} [ns_server:debug,2014-08-19T16:49:47.163,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,730,666,602,538,236,172,108,964,900,836,772,470, 406,342,278,951,704,640,576,512,210,146,1015,938,874,810,508,444,380,316, 1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716, 652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,762,634,140,996,868, 502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138,994, 866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320, 993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630,136, 992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812,446, 318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756,628, 134,990,862,496,368] [ns_server:debug,2014-08-19T16:49:47.164,ns_1@10.242.238.88:<0.28501.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_504 [rebalance:info,2014-08-19T16:49:47.166,ns_1@10.242.238.88:<0.28501.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[504]}, {checkpoints,[{504,1}]}, {name,<<"rebalance_504">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[504]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"504"}]} [rebalance:debug,2014-08-19T16:49:47.167,ns_1@10.242.238.88:<0.28501.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28507.0> [rebalance:info,2014-08-19T16:49:47.167,ns_1@10.242.238.88:<0.28501.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:47.170,ns_1@10.242.238.88:<0.28501.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:47.170,ns_1@10.242.238.88:<0.28501.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:49:47.171,ns_1@10.242.238.88:<0.26482.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 504 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:47.173,ns_1@10.242.238.88:<0.26490.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:47.178,ns_1@10.242.238.88:<0.26490.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_504_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:47.178,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 504 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:47.178,ns_1@10.242.238.88:<0.28511.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 504 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:49:47.189,ns_1@10.242.238.88:<0.28512.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 506) [rebalance:info,2014-08-19T16:49:47.190,ns_1@10.242.238.88:<0.26543.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 759 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:47.190,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 759 state to active [rebalance:info,2014-08-19T16:49:47.190,ns_1@10.242.238.88:<0.26289.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:49:47.191,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:47.191,ns_1@10.242.238.88:<0.26543.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 759 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:49:47.192,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{504, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:49:47.192,ns_1@10.242.238.88:<0.26543.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:47.192,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:47.193,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.194,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:47.194,ns_1@10.242.238.88:<0.26297.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_506_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:47.194,ns_1@10.242.238.88:<0.26289.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:47.200,ns_1@10.242.238.88:<0.26289.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 506 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28527.0> [ns_server:info,2014-08-19T16:49:47.201,ns_1@10.242.238.88:<0.28527.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 506 to state replica [rebalance:info,2014-08-19T16:49:47.201,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 504 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:47.202,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 504) [ns_server:debug,2014-08-19T16:49:47.203,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:49:47.221,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/947. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.221,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",947,active,0} [rebalance:info,2014-08-19T16:49:47.246,ns_1@10.242.238.88:<0.28529.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 508) [rebalance:info,2014-08-19T16:49:47.246,ns_1@10.242.238.88:<0.26355.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 761 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:49:47.246,ns_1@10.242.238.88:<0.28527.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_506 [ns_server:info,2014-08-19T16:49:47.247,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 761 state to active [rebalance:info,2014-08-19T16:49:47.247,ns_1@10.242.238.88:<0.26107.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:47.257,ns_1@10.242.238.88:<0.28527.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[506]}, {checkpoints,[{506,1}]}, {name,<<"rebalance_506">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[506]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"506"}]} [rebalance:info,2014-08-19T16:49:47.257,ns_1@10.242.238.88:<0.26355.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 761 on ns_1@10.242.238.88 [rebalance:debug,2014-08-19T16:49:47.259,ns_1@10.242.238.88:<0.28527.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28531.0> [rebalance:info,2014-08-19T16:49:47.259,ns_1@10.242.238.88:<0.26355.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:47.260,ns_1@10.242.238.88:<0.26115.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_508_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:47.260,ns_1@10.242.238.88:<0.26107.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:47.261,ns_1@10.242.238.88:<0.28527.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:47.263,ns_1@10.242.238.88:<0.28527.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:49:47.263,ns_1@10.242.238.88:<0.26107.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 508 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28543.0> [rebalance:info,2014-08-19T16:49:47.264,ns_1@10.242.238.88:<0.28527.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:49:47.264,ns_1@10.242.238.88:<0.28543.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 508 to state replica [rebalance:info,2014-08-19T16:49:47.264,ns_1@10.242.238.88:<0.26289.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 506 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:47.266,ns_1@10.242.238.88:<0.26297.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:47.271,ns_1@10.242.238.88:<0.26297.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_506_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:47.272,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 506 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:47.272,ns_1@10.242.238.88:<0.28555.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 506 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:47.284,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.286,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
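Editor's note: the "Starting tap stream" entries above list the options a takeover mover runs with ({vbuckets,[506]}, {checkpoints,[{506,1}]}, {takeover,true}) together with the source and destination memcached endpoints. The structure below only restates those logged Erlang terms in Python so the per-vbucket takeover parameters are easier to scan; the names are the editor's paraphrase, not a Couchbase API.

from dataclasses import dataclass
from typing import Tuple

@dataclass
class TakeoverStream:
    """Paraphrase of the logged 'Starting tap stream' options (editorial sketch, not ns_server code)."""
    src: Tuple[str, int]      # upstream node, e.g. ("10.242.238.88", 11209)
    dst: Tuple[str, int]      # downstream node, e.g. ("10.242.238.89", 11209)
    bucket: str               # logged as {username,"default"}
    vbucket: int              # logged as {vbuckets,[506]}
    checkpoint: int           # logged as {checkpoints,[{506,1}]}
    takeover: bool = True     # logged as {takeover,true}

    @property
    def tap_name(self) -> str:
        # The log names each takeover stream rebalance_<vbucket>.
        return "rebalance_%d" % self.vbucket

vb506 = TakeoverStream(("10.242.238.88", 11209), ("10.242.238.89", 11209), "default", 506, 1)
print(vb506.tap_name)   # rebalance_506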
[ns_server:debug,2014-08-19T16:49:47.286,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.286,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{506, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:47.294,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:47.303,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 506 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:47.303,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 506) [ns_server:debug,2014-08-19T16:49:47.304,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:47.305,ns_1@10.242.238.88:<0.26177.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 763 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:47.305,ns_1@10.242.238.88:<0.28565.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 510) [ns_server:info,2014-08-19T16:49:47.305,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 763 state to active [rebalance:info,2014-08-19T16:49:47.306,ns_1@10.242.238.88:<0.25911.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:47.306,ns_1@10.242.238.88:<0.26177.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 763 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.307,ns_1@10.242.238.88:<0.26177.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:47.309,ns_1@10.242.238.88:<0.25919.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_510_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:47.309,ns_1@10.242.238.88:<0.25911.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:47.311,ns_1@10.242.238.88:<0.25911.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 510 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28572.0> [ns_server:info,2014-08-19T16:49:47.312,ns_1@10.242.238.88:<0.28572.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 510 to state replica [ns_server:debug,2014-08-19T16:49:47.313,ns_1@10.242.238.88:<0.28543.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_508 [rebalance:info,2014-08-19T16:49:47.318,ns_1@10.242.238.88:<0.28543.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[508]}, {checkpoints,[{508,1}]}, {name,<<"rebalance_508">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[508]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"508"}]} [rebalance:debug,2014-08-19T16:49:47.319,ns_1@10.242.238.88:<0.28543.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28573.0> [rebalance:info,2014-08-19T16:49:47.319,ns_1@10.242.238.88:<0.28543.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:47.321,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 945. Nacking mccouch update. [views:debug,2014-08-19T16:49:47.321,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/945. 
Updated state: active (0) [rebalance:debug,2014-08-19T16:49:47.321,ns_1@10.242.238.88:<0.28543.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:49:47.321,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",945,active,0} [rebalance:info,2014-08-19T16:49:47.321,ns_1@10.242.238.88:<0.28543.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:47.322,ns_1@10.242.238.88:<0.26107.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 508 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:47.322,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,730,666,602,538,236,172,108,964,900,836,772,470, 406,342,278,951,704,640,576,512,210,146,1015,938,874,810,508,444,380,316, 1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716, 652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,760,632,138, 994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758,630, 136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812, 446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,756, 628,134,990,862,496,368] [rebalance:debug,2014-08-19T16:49:47.323,ns_1@10.242.238.88:<0.26115.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:47.328,ns_1@10.242.238.88:<0.26115.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_508_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:47.328,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 508 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] 
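Editor's note: the capi_set_view_manager "Usable vbuckets" dumps repeat throughout this section and differ only by the vbuckets that have just become usable on this node (945 here, then 943, 941 and 939 shortly after). When comparing two consecutive dumps offline, a plain set difference shows exactly what changed; the helper below is a throwaway sketch for that, nothing more.

def newly_usable(before, after):
    """Given two consecutive 'Usable vbuckets' lists, return what was added and what was dropped."""
    before, after = set(before), set(after)
    return sorted(after - before), sorted(before - after)

added, dropped = newly_usable([750, 622, 128], [750, 622, 128, 945])
print(added, dropped)   # [945] []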
[rebalance:info,2014-08-19T16:49:47.328,ns_1@10.242.238.88:<0.28577.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 508 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:49:47.340,ns_1@10.242.238.88:<0.27616.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 748 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:47.340,ns_1@10.242.238.88:<0.25981.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 765 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:47.341,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 748 state to active [ns_server:debug,2014-08-19T16:49:47.342,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.342,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.342,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{508, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:47.346,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
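Editor's note: each "config change: buckets" dump in this stretch carries a one-element map diff that reads as {VBucket, OldChain, NewChain}, e.g. vbucket 508 going from ['ns_1@10.242.238.88', undefined] to ['ns_1@10.242.238.89','ns_1@10.242.238.91']. Under that reading (the editor's interpretation of the logged tuple, not a documented format), a move can be summarized like this:

def describe_move(vbucket, old_chain, new_chain):
    """Summarize one map-diff entry; None stands in for the Erlang atom 'undefined'."""
    show = lambda chain: [node or "undefined" for node in chain]
    return ("vb %d: active %s -> %s, replicas %s -> %s"
            % (vbucket, show(old_chain)[0], show(new_chain)[0],
               show(old_chain)[1:], show(new_chain)[1:]))

print(describe_move(508,
                    ["ns_1@10.242.238.88", None],
                    ["ns_1@10.242.238.89", "ns_1@10.242.238.91"]))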
[ns_server:debug,2014-08-19T16:49:47.347,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:47.347,ns_1@10.242.238.88:<0.27616.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 748 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:47.347,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 765 state to active [rebalance:info,2014-08-19T16:49:47.349,ns_1@10.242.238.88:<0.25981.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 765 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.350,ns_1@10.242.238.88:<0.27616.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:47.350,ns_1@10.242.238.88:<0.28572.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_510 [rebalance:info,2014-08-19T16:49:47.350,ns_1@10.242.238.88:<0.25981.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:47.356,ns_1@10.242.238.88:<0.28572.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[510]}, {checkpoints,[{510,1}]}, {name,<<"rebalance_510">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[510]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"510"}]} [rebalance:debug,2014-08-19T16:49:47.356,ns_1@10.242.238.88:<0.28572.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28594.0> [rebalance:info,2014-08-19T16:49:47.357,ns_1@10.242.238.88:<0.28572.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:47.359,ns_1@10.242.238.88:<0.28572.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:47.359,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 508 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:49:47.359,ns_1@10.242.238.88:<0.28572.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:47.360,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 508) [rebalance:info,2014-08-19T16:49:47.360,ns_1@10.242.238.88:<0.25911.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 510 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:47.361,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:49:47.366,ns_1@10.242.238.88:<0.25919.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:47.370,ns_1@10.242.238.88:<0.25919.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_510_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:47.371,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 510 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:47.371,ns_1@10.242.238.88:<0.28599.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 510 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [views:debug,2014-08-19T16:49:47.380,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/945. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.380,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",945,active,0} [ns_server:debug,2014-08-19T16:49:47.389,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.390,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{510, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:47.390,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:47.391,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:47.391,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:47.406,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 510 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:47.406,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 510) [ns_server:debug,2014-08-19T16:49:47.407,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:47.430,ns_1@10.242.238.88:<0.27407.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 750 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:47.430,ns_1@10.242.238.88:<0.27690.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1003 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:47.431,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 750 state to active [rebalance:info,2014-08-19T16:49:47.431,ns_1@10.242.238.88:<0.27407.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 750 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:47.432,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1003 state to active [rebalance:info,2014-08-19T16:49:47.433,ns_1@10.242.238.88:<0.27690.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1003 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.433,ns_1@10.242.238.88:<0.27407.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:47.433,ns_1@10.242.238.88:<0.27690.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:47.522,ns_1@10.242.238.88:<0.27239.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 752 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:47.522,ns_1@10.242.238.88:<0.27477.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1005 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:47.522,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 752 state to active [rebalance:info,2014-08-19T16:49:47.523,ns_1@10.242.238.88:<0.27239.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 752 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:47.524,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1005 state to active [rebalance:info,2014-08-19T16:49:47.525,ns_1@10.242.238.88:<0.27477.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1005 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.525,ns_1@10.242.238.88:<0.27239.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:47.525,ns_1@10.242.238.88:<0.27477.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:47.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 943. Nacking mccouch update. [views:debug,2014-08-19T16:49:47.547,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/943. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",943,active,0} [ns_server:debug,2014-08-19T16:49:47.548,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,730,666,602,538,236,172,108,964,900,836,772,470, 406,342,278,951,704,640,576,512,210,146,1015,938,874,810,508,444,380,316, 1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716, 652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,758, 630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 756,628,134,990,862,496,368] [views:debug,2014-08-19T16:49:47.630,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/943. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.631,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",943,active,0} [rebalance:info,2014-08-19T16:49:47.640,ns_1@10.242.238.88:<0.27038.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 754 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:47.640,ns_1@10.242.238.88:<0.27295.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1007 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:47.641,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 754 state to active [rebalance:info,2014-08-19T16:49:47.642,ns_1@10.242.238.88:<0.27038.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 754 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:47.642,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1007 state to active [rebalance:info,2014-08-19T16:49:47.643,ns_1@10.242.238.88:<0.27295.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1007 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.644,ns_1@10.242.238.88:<0.27038.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:47.644,ns_1@10.242.238.88:<0.27295.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:47.742,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 941. Nacking mccouch update. [views:debug,2014-08-19T16:49:47.742,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/941. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.742,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",941,active,0} [ns_server:debug,2014-08-19T16:49:47.743,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,730,666,602,538,236,172,108,964,900,836,772,470, 406,342,278,951,704,640,576,512,210,146,1015,938,874,810,508,444,380,316, 1002,989,742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716, 652,588,524,222,158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,756,628,134,990,862,496,368] [rebalance:info,2014-08-19T16:49:47.758,ns_1@10.242.238.88:<0.27113.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1009 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:47.758,ns_1@10.242.238.88:<0.26855.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 756 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:47.758,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1009 state to active [rebalance:info,2014-08-19T16:49:47.759,ns_1@10.242.238.88:<0.27113.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1009 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:47.759,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 756 state to active [rebalance:info,2014-08-19T16:49:47.760,ns_1@10.242.238.88:<0.26855.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 756 on ns_1@10.242.238.88 
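Editor's note: the same per-vbucket pattern repeats across these entries: the old active copy is marked active/paused, the replication persistence checkpoint id is read, the mover waits for that checkpoint on the replicas and for the destination index to catch up, the TAP takeover runs, and finally the updated bucket map is pushed. The enumeration below is purely an editorial summary of those step names as they appear in the log, not code from ns_server.

from enum import Enum, auto

class MoveStep(Enum):
    # Editorial summary of the recurring step names seen in this section of the log.
    PAUSE_OLD_ACTIVE = auto()            # "state change: {..., active, paused, undefined}"
    READ_PERSISTENCE_CHECKPOINT = auto() # get_replication_persistence_checkpoint_id
    WAIT_CHECKPOINT_ON_REPLICAS = auto() # "Will wait for checkpoint 1 on replicas"
    WAIT_INDEX_UPDATED = auto()          # janitor_agent:wait_index_updated
    TAP_TAKEOVER = auto()                # ebucketmigrator_srv stream with {takeover,true}
    UPDATE_BUCKET_MAP = auto()           # "config change: buckets -> ..."

print([step.name for step in MoveStep])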
[rebalance:info,2014-08-19T16:49:47.761,ns_1@10.242.238.88:<0.27113.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:47.761,ns_1@10.242.238.88:<0.26855.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:47.826,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/941. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.826,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",941,active,0} [rebalance:info,2014-08-19T16:49:47.868,ns_1@10.242.238.88:<0.26642.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 758 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:47.869,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 758 state to active [rebalance:info,2014-08-19T16:49:47.870,ns_1@10.242.238.88:<0.26912.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1011 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:47.871,ns_1@10.242.238.88:<0.26642.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 758 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:47.871,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1011 state to active [rebalance:info,2014-08-19T16:49:47.872,ns_1@10.242.238.88:<0.26912.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1011 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.873,ns_1@10.242.238.88:<0.26642.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:47.873,ns_1@10.242.238.88:<0.26912.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:47.909,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 939. Nacking mccouch update. [views:debug,2014-08-19T16:49:47.909,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/939. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.909,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",939,active,0} [ns_server:debug,2014-08-19T16:49:47.911,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,666,538,172,964,900,836,772,470,406,342,278,951, 704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,742,678, 614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,754,690,626,562,196,132,1001,988,924, 860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770,468,404, 340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650, 586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560,194,130, 986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832,768, 466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994, 866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320, 993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630, 136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940,812, 446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016,939, 756,628,134,990,862,496,368,730,602,236,108] [views:debug,2014-08-19T16:49:47.943,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/939. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:47.943,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",939,active,0} [ns_server:debug,2014-08-19T16:49:47.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1002_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1002_'ns_1@10.242.238.90'">>}]}, {move_state,491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_491_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_491_'ns_1@10.242.238.91'">>}]}, {move_state,747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_747_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_747_'ns_1@10.242.238.91'">>}]}, {move_state,1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1003_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1003_'ns_1@10.242.238.90'">>}]}, {move_state,492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_492_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_492_'ns_1@10.242.238.91'">>}]}, {move_state,748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_748_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_748_'ns_1@10.242.238.91'">>}]}, {move_state,1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1004_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1004_'ns_1@10.242.238.90'">>}]}, {move_state,493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_493_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_493_'ns_1@10.242.238.91'">>}]}, {move_state,749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_749_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_749_'ns_1@10.242.238.91'">>}]}, {move_state,1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1005_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1005_'ns_1@10.242.238.90'">>}]}, {move_state,494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_494_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_494_'ns_1@10.242.238.91'">>}]}, {move_state,750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_750_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_750_'ns_1@10.242.238.91'">>}]}, {move_state,1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1006_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1006_'ns_1@10.242.238.90'">>}]}, {move_state,495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_495_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_495_'ns_1@10.242.238.91'">>}]}, {move_state,751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_751_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_751_'ns_1@10.242.238.91'">>}]}, {move_state,1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1007_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1007_'ns_1@10.242.238.90'">>}]}, {move_state,496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_496_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_496_'ns_1@10.242.238.91'">>}]}, {move_state,752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_752_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_752_'ns_1@10.242.238.91'">>}]}, {move_state,1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1008_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1008_'ns_1@10.242.238.90'">>}]}, {move_state,497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_497_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_497_'ns_1@10.242.238.91'">>}]}, {move_state,753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_753_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_753_'ns_1@10.242.238.91'">>}]}, {move_state,1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1009_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_1009_'ns_1@10.242.238.90'">>}]}, {move_state,498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_498_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_498_'ns_1@10.242.238.91'">>}]}, {move_state,754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_754_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_754_'ns_1@10.242.238.91'">>}]}, {move_state,1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1010_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1010_'ns_1@10.242.238.90'">>}]}, {move_state,499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_499_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_499_'ns_1@10.242.238.91'">>}]}, {move_state,755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_755_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_755_'ns_1@10.242.238.91'">>}]}, {move_state,1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1011_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1011_'ns_1@10.242.238.90'">>}]}, {move_state,500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_500_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_500_'ns_1@10.242.238.91'">>}]}, {move_state,756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_756_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_756_'ns_1@10.242.238.91'">>}]}, {move_state,1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1012_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1012_'ns_1@10.242.238.90'">>}]}, {move_state,501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_501_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_501_'ns_1@10.242.238.91'">>}]}, {move_state,757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_757_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_757_'ns_1@10.242.238.91'">>}]}, {move_state,1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1013_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1013_'ns_1@10.242.238.90'">>}]}, {move_state,502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_502_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_502_'ns_1@10.242.238.91'">>}]}, {move_state,758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_758_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_758_'ns_1@10.242.238.91'">>}]}, {move_state,1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1014_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1014_'ns_1@10.242.238.90'">>}]}, {move_state,503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_503_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_503_'ns_1@10.242.238.91'">>}]}, {move_state,759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_759_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_759_'ns_1@10.242.238.91'">>}]}, {move_state,1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1015_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1015_'ns_1@10.242.238.90'">>}]}, {move_state,760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_760_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_760_'ns_1@10.242.238.91'">>}]}, {move_state,1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1016_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1016_'ns_1@10.242.238.90'">>}]}, {move_state,505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_505_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_505_'ns_1@10.242.238.91'">>}]}, {move_state,761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_761_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_761_'ns_1@10.242.238.91'">>}]}, {move_state,1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1017_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_1017_'ns_1@10.242.238.90'">>}]}, {move_state,762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_762_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_762_'ns_1@10.242.238.91'">>}]}, {move_state,1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1018_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1018_'ns_1@10.242.238.90'">>}]}, {move_state,507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_507_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_507_'ns_1@10.242.238.91'">>}]}, {move_state,763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_763_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_763_'ns_1@10.242.238.91'">>}]}, {move_state,1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1019_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1019_'ns_1@10.242.238.90'">>}]}, {move_state,764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_764_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_764_'ns_1@10.242.238.91'">>}]}, {move_state,1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1020_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1020_'ns_1@10.242.238.90'">>}]}, {move_state,509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_509_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_509_'ns_1@10.242.238.91'">>}]}, {move_state,765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_765_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_765_'ns_1@10.242.238.91'">>}]}, {move_state,1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1021_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1021_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:49:47.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1002, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 491, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:47.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 747, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1003, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 492, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 748, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1004, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 493, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 749, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1005, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 494, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 750, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1006, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 495, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 751, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1007, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 496, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 752, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1008, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 497, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 753, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:47.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1009, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 498, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 754, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [rebalance:info,2014-08-19T16:49:47.969,ns_1@10.242.238.88:<0.28717.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 760) [rebalance:info,2014-08-19T16:49:47.969,ns_1@10.242.238.88:<0.26712.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1013 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:debug,2014-08-19T16:49:47.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1010, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:info,2014-08-19T16:49:47.969,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1013 state to active [rebalance:info,2014-08-19T16:49:47.970,ns_1@10.242.238.88:<0.26447.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:47.971,ns_1@10.242.238.88:<0.26712.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1013 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:47.971,ns_1@10.242.238.88:<0.26712.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:47.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 499, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 755, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1011, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:info,2014-08-19T16:49:47.973,ns_1@10.242.238.88:<0.26455.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_760_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:47.973,ns_1@10.242.238.88:<0.26447.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:47.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 500, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 756, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1012, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 501, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:47.975,ns_1@10.242.238.88:<0.26447.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 760 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.28731.0> [ns_server:debug,2014-08-19T16:49:47.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 757, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:info,2014-08-19T16:49:47.976,ns_1@10.242.238.88:<0.28731.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 760 to state replica [ns_server:debug,2014-08-19T16:49:47.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1013, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 502, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 758, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1014, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 503, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 759, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1015, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 760, [{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1016, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 505, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 761, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1017, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 762, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1018, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 507, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 763, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:47.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1019, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 764, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1020, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:47.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 509, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 765, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:47.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1021, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [rebalance:info,2014-08-19T16:49:48.019,ns_1@10.242.238.88:<0.26503.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1015 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:48.019,ns_1@10.242.238.88:<0.28769.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 762) [ns_server:debug,2014-08-19T16:49:48.020,ns_1@10.242.238.88:<0.28731.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_760 [ns_server:info,2014-08-19T16:49:48.020,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1015 state to active [rebalance:info,2014-08-19T16:49:48.020,ns_1@10.242.238.88:<0.26254.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.021,ns_1@10.242.238.88:<0.26503.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1015 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.021,ns_1@10.242.238.88:<0.28731.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[760]}, {checkpoints,[{760,1}]}, {name,<<"rebalance_760">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[760]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"760"}]} [rebalance:info,2014-08-19T16:49:48.022,ns_1@10.242.238.88:<0.26503.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:debug,2014-08-19T16:49:48.022,ns_1@10.242.238.88:<0.28731.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28774.0> [rebalance:info,2014-08-19T16:49:48.023,ns_1@10.242.238.88:<0.28731.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:49:48.023,ns_1@10.242.238.88:<0.26262.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_762_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.024,ns_1@10.242.238.88:<0.26254.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:debug,2014-08-19T16:49:48.025,ns_1@10.242.238.88:<0.28731.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.025,ns_1@10.242.238.88:<0.28731.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.026,ns_1@10.242.238.88:<0.26447.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 760 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:48.027,ns_1@10.242.238.88:<0.26254.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 762 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.28777.0> [rebalance:debug,2014-08-19T16:49:48.028,ns_1@10.242.238.88:<0.26455.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.028,ns_1@10.242.238.88:<0.28777.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 762 to state replica [ns_server:info,2014-08-19T16:49:48.030,ns_1@10.242.238.88:<0.26455.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_760_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.031,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 760 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:48.031,ns_1@10.242.238.88:<0.28781.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 760 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:48.043,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 937. Nacking mccouch update. [views:debug,2014-08-19T16:49:48.043,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/937. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.044,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",937,active,0} [ns_server:debug,2014-08-19T16:49:48.045,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,666,538,172,964,900,836,772,470,406,342,278,951, 704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,742,678, 614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001,988, 924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770,468, 404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868, 502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,730,602,236,108] [ns_server:debug,2014-08-19T16:49:48.047,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:48.048,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.048,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.048,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{760, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.049,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:48.055,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 760 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.057,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 760) [ns_server:debug,2014-08-19T16:49:48.057,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:49:48.070,ns_1@10.242.238.88:<0.28777.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_762 [rebalance:info,2014-08-19T16:49:48.071,ns_1@10.242.238.88:<0.28791.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 764) [rebalance:info,2014-08-19T16:49:48.071,ns_1@10.242.238.88:<0.26318.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1017 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:48.071,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1017 state to active [rebalance:info,2014-08-19T16:49:48.072,ns_1@10.242.238.88:<0.26085.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.072,ns_1@10.242.238.88:<0.26318.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1017 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.072,ns_1@10.242.238.88:<0.28777.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[762]}, {checkpoints,[{762,1}]}, {name,<<"rebalance_762">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[762]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"762"}]} [rebalance:info,2014-08-19T16:49:48.073,ns_1@10.242.238.88:<0.26318.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:debug,2014-08-19T16:49:48.073,ns_1@10.242.238.88:<0.28777.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28796.0> [rebalance:info,2014-08-19T16:49:48.074,ns_1@10.242.238.88:<0.28777.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.075,ns_1@10.242.238.88:<0.28777.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.075,ns_1@10.242.238.88:<0.28777.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:49:48.076,ns_1@10.242.238.88:<0.26093.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_764_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.076,ns_1@10.242.238.88:<0.26085.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.076,ns_1@10.242.238.88:<0.26254.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 762 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.078,ns_1@10.242.238.88:<0.26262.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.078,ns_1@10.242.238.88:<0.26085.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 764 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.28799.0> [ns_server:info,2014-08-19T16:49:48.079,ns_1@10.242.238.88:<0.28799.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 764 to state replica [ns_server:info,2014-08-19T16:49:48.081,ns_1@10.242.238.88:<0.26262.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_762_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.081,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 762 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:48.081,ns_1@10.242.238.88:<0.28803.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 762 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:48.090,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.091,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{762, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, 
{uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.091,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.091,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.092,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:49:48.094,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/937. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.094,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",937,active,0} [rebalance:info,2014-08-19T16:49:48.101,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 762 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.101,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 762) [ns_server:debug,2014-08-19T16:49:48.102,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.105,ns_1@10.242.238.88:<0.27767.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1002 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:48.105,ns_1@10.242.238.88:<0.26142.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1019 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:48.107,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1002 state to active [rebalance:info,2014-08-19T16:49:48.108,ns_1@10.242.238.88:<0.27767.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1002 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:48.108,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1019 state to active [rebalance:info,2014-08-19T16:49:48.109,ns_1@10.242.238.88:<0.26142.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1019 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.109,ns_1@10.242.238.88:<0.27767.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:48.110,ns_1@10.242.238.88:<0.26142.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:48.123,ns_1@10.242.238.88:<0.28799.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_764 [rebalance:info,2014-08-19T16:49:48.124,ns_1@10.242.238.88:<0.28799.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[764]}, {checkpoints,[{764,1}]}, {name,<<"rebalance_764">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, 
{password,get_from_config}, {vbuckets,[764]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"764"}]} [rebalance:debug,2014-08-19T16:49:48.125,ns_1@10.242.238.88:<0.28799.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28821.0> [rebalance:info,2014-08-19T16:49:48.126,ns_1@10.242.238.88:<0.28799.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.128,ns_1@10.242.238.88:<0.28799.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.128,ns_1@10.242.238.88:<0.28799.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.129,ns_1@10.242.238.88:<0.26085.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 764 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.131,ns_1@10.242.238.88:<0.26093.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.134,ns_1@10.242.238.88:<0.26093.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_764_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.134,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 764 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:48.134,ns_1@10.242.238.88:<0.28839.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 764 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:48.147,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.148,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{764, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.149,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.149,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.149,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:49:48.156,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 764 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.157,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 764) [ns_server:debug,2014-08-19T16:49:48.158,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:49:48.169,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 935. Nacking mccouch update. [views:debug,2014-08-19T16:49:48.169,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/935. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.169,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",935,active,0} [ns_server:debug,2014-08-19T16:49:48.170,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009,932, 804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258,1008, 748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202,1007, 930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384,256, 1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566,200, 1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876,510, 382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692,564, 198,1003,926,798,432,304,977,666,538,172,964,900,836,772,470,406,342,278,951, 704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,742,678, 614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001,988, 924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770,468, 404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,730,602,236,108] [views:debug,2014-08-19T16:49:48.203,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/935. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.204,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",935,active,0} [rebalance:info,2014-08-19T16:49:48.230,ns_1@10.242.238.88:<0.25946.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1021 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:48.230,ns_1@10.242.238.88:<0.27587.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1004 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:48.231,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1021 state to active [rebalance:info,2014-08-19T16:49:48.232,ns_1@10.242.238.88:<0.25946.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1021 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:48.232,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1004 state to active [rebalance:info,2014-08-19T16:49:48.233,ns_1@10.242.238.88:<0.27587.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1004 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.234,ns_1@10.242.238.88:<0.25946.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:48.234,ns_1@10.242.238.88:<0.27587.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:48.286,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 933. Nacking mccouch update. [views:debug,2014-08-19T16:49:48.287,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/933. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",933,active,0} [ns_server:debug,2014-08-19T16:49:48.288,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568,202, 1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878,384, 256,1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694,566, 200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148,876, 510,382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330,692, 564,198,1003,926,798,432,304,977,666,538,172,964,900,836,772,470,406,342,278, 951,704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,742, 678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001, 988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770, 468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,730,602,236,108] [rebalance:info,2014-08-19T16:49:48.356,ns_1@10.242.238.88:<0.28871.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 491) [rebalance:info,2014-08-19T16:49:48.356,ns_1@10.242.238.88:<0.27372.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1006 state change: {'ns_1@10.242.238.88',active,paused, undefined} [views:debug,2014-08-19T16:49:48.356,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/933. Updated state: active (0) [ns_server:info,2014-08-19T16:49:48.356,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1006 state to active [ns_server:debug,2014-08-19T16:49:48.356,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",933,active,0} [rebalance:info,2014-08-19T16:49:48.357,ns_1@10.242.238.88:<0.27746.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.358,ns_1@10.242.238.88:<0.27372.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1006 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.359,ns_1@10.242.238.88:<0.27372.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:48.360,ns_1@10.242.238.88:<0.27754.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_491_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.360,ns_1@10.242.238.88:<0.27746.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:48.363,ns_1@10.242.238.88:<0.27746.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 491 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28878.0> [ns_server:info,2014-08-19T16:49:48.364,ns_1@10.242.238.88:<0.28878.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 491 to state replica [ns_server:debug,2014-08-19T16:49:48.406,ns_1@10.242.238.88:<0.28878.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_491 [rebalance:info,2014-08-19T16:49:48.406,ns_1@10.242.238.88:<0.27204.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1008 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:48.406,ns_1@10.242.238.88:<0.28879.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 493) [ns_server:info,2014-08-19T16:49:48.406,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1008 state to active [rebalance:info,2014-08-19T16:49:48.407,ns_1@10.242.238.88:<0.27547.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.407,ns_1@10.242.238.88:<0.28878.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[491]}, {checkpoints,[{491,1}]}, {name,<<"rebalance_491">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[491]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"491"}]} [rebalance:info,2014-08-19T16:49:48.408,ns_1@10.242.238.88:<0.27204.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1008 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.408,ns_1@10.242.238.88:<0.27204.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:debug,2014-08-19T16:49:48.409,ns_1@10.242.238.88:<0.28878.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28884.0> [ns_server:info,2014-08-19T16:49:48.409,ns_1@10.242.238.88:<0.27555.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_493_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.410,ns_1@10.242.238.88:<0.27547.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.410,ns_1@10.242.238.88:<0.28878.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.412,ns_1@10.242.238.88:<0.28878.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:49:48.412,ns_1@10.242.238.88:<0.27547.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 493 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28887.0> [rebalance:info,2014-08-19T16:49:48.412,ns_1@10.242.238.88:<0.28878.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:49:48.412,ns_1@10.242.238.88:<0.28887.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 493 to state replica [rebalance:info,2014-08-19T16:49:48.413,ns_1@10.242.238.88:<0.27746.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 491 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.414,ns_1@10.242.238.88:<0.27754.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.417,ns_1@10.242.238.88:<0.27754.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_491_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.417,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 491 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.417,ns_1@10.242.238.88:<0.28896.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 491 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:48.429,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.430,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:49:48.430,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{491, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.431,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.432,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:49:48.446,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 491 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.447,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 491) [ns_server:debug,2014-08-19T16:49:48.448,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:49:48.455,ns_1@10.242.238.88:<0.28887.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_493 [rebalance:info,2014-08-19T16:49:48.456,ns_1@10.242.238.88:<0.27003.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1010 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:48.456,ns_1@10.242.238.88:<0.28915.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 495) [rebalance:info,2014-08-19T16:49:48.456,ns_1@10.242.238.88:<0.28887.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[493]}, {checkpoints,[{493,1}]}, {name,<<"rebalance_493">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[493]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"493"}]} [ns_server:debug,2014-08-19T16:49:48.456,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 931. Nacking mccouch update. [views:debug,2014-08-19T16:49:48.456,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/931. Updated state: active (0) [ns_server:info,2014-08-19T16:49:48.456,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 1010 state to active [ns_server:debug,2014-08-19T16:49:48.456,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",931,active,0} [rebalance:debug,2014-08-19T16:49:48.457,ns_1@10.242.238.88:<0.28887.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28916.0> [rebalance:info,2014-08-19T16:49:48.457,ns_1@10.242.238.88:<0.27351.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.458,ns_1@10.242.238.88:<0.28887.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:48.458,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332,694, 566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514,148, 876,510,382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458,330, 692,564,198,1003,926,798,432,304,977,666,538,172,964,900,836,772,470,406,342, 278,951,704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989, 742,678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588, 524,222,158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132, 1001,988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997, 686,558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140, 996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450, 322,995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760, 632,138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942, 814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,730,602,236,108] [rebalance:debug,2014-08-19T16:49:48.460,ns_1@10.242.238.88:<0.28887.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.460,ns_1@10.242.238.88:<0.28887.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.460,ns_1@10.242.238.88:<0.27003.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1010 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:48.461,ns_1@10.242.238.88:<0.27359.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_495_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.461,ns_1@10.242.238.88:<0.27003.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:48.461,ns_1@10.242.238.88:<0.27351.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[rebalance:info,2014-08-19T16:49:48.461,ns_1@10.242.238.88:<0.27547.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 493 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.463,ns_1@10.242.238.88:<0.27555.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.463,ns_1@10.242.238.88:<0.27351.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 495 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28923.0> [ns_server:info,2014-08-19T16:49:48.464,ns_1@10.242.238.88:<0.28923.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 495 to state replica [ns_server:info,2014-08-19T16:49:48.466,ns_1@10.242.238.88:<0.27555.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_493_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.466,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 493 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.466,ns_1@10.242.238.88:<0.28927.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 493 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:48.479,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.480,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.480,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.480,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{493, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.481,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:48.489,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 493 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.490,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 493) [ns_server:debug,2014-08-19T16:49:48.491,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:49:48.491,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/931. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.491,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",931,active,0} [ns_server:debug,2014-08-19T16:49:48.507,ns_1@10.242.238.88:<0.28923.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_495 [rebalance:info,2014-08-19T16:49:48.509,ns_1@10.242.238.88:<0.28923.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[495]}, {checkpoints,[{495,1}]}, {name,<<"rebalance_495">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[495]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"495"}]} [rebalance:debug,2014-08-19T16:49:48.509,ns_1@10.242.238.88:<0.28923.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28937.0> [rebalance:info,2014-08-19T16:49:48.510,ns_1@10.242.238.88:<0.28923.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.512,ns_1@10.242.238.88:<0.28923.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.512,ns_1@10.242.238.88:<0.28923.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.513,ns_1@10.242.238.88:<0.27351.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 495 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:48.514,ns_1@10.242.238.88:<0.26834.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1012 state change: {'ns_1@10.242.238.88',active,paused, undefined} [rebalance:info,2014-08-19T16:49:48.515,ns_1@10.242.238.88:<0.28938.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 497) [ns_server:info,2014-08-19T16:49:48.515,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1012 state to active [rebalance:debug,2014-08-19T16:49:48.515,ns_1@10.242.238.88:<0.27359.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.515,ns_1@10.242.238.88:<0.27169.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.516,ns_1@10.242.238.88:<0.26834.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1012 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.517,ns_1@10.242.238.88:<0.26834.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:48.518,ns_1@10.242.238.88:<0.27359.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_495_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.518,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 495 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [ns_server:info,2014-08-19T16:49:48.519,ns_1@10.242.238.88:<0.27177.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_497_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.519,ns_1@10.242.238.88:<0.28948.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 495 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:49:48.519,ns_1@10.242.238.88:<0.27169.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:48.521,ns_1@10.242.238.88:<0.27169.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 497 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28949.0> [ns_server:info,2014-08-19T16:49:48.521,ns_1@10.242.238.88:<0.28949.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 497 to state replica [ns_server:debug,2014-08-19T16:49:48.531,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{495, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.532,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.533,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:48.544,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 495 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.545,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 495) [ns_server:debug,2014-08-19T16:49:48.546,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.548,ns_1@10.242.238.88:<0.28959.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 499) [rebalance:info,2014-08-19T16:49:48.548,ns_1@10.242.238.88:<0.26607.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1014 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:49:48.549,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1014 state to active [rebalance:info,2014-08-19T16:49:48.549,ns_1@10.242.238.88:<0.26968.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.550,ns_1@10.242.238.88:<0.26607.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1014 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:48.550,ns_1@10.242.238.88:<0.26607.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:49:48.552,ns_1@10.242.238.88:<0.26976.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_499_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.552,ns_1@10.242.238.88:<0.26968.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:48.555,ns_1@10.242.238.88:<0.26968.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 499 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.28979.0> [ns_server:info,2014-08-19T16:49:48.556,ns_1@10.242.238.88:<0.28979.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 499 to state replica [ns_server:debug,2014-08-19T16:49:48.562,ns_1@10.242.238.88:<0.28949.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_497 [rebalance:info,2014-08-19T16:49:48.563,ns_1@10.242.238.88:<0.28949.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[497]}, {checkpoints,[{497,1}]}, {name,<<"rebalance_497">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[497]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"497"}]} [rebalance:debug,2014-08-19T16:49:48.564,ns_1@10.242.238.88:<0.28949.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28981.0> [rebalance:info,2014-08-19T16:49:48.565,ns_1@10.242.238.88:<0.28949.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.567,ns_1@10.242.238.88:<0.28949.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.567,ns_1@10.242.238.88:<0.28949.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.568,ns_1@10.242.238.88:<0.27169.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 497 state change: {'ns_1@10.242.238.89',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:49:48.569,ns_1@10.242.238.88:<0.27177.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.572,ns_1@10.242.238.88:<0.27177.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_497_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.572,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 497 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.572,ns_1@10.242.238.88:<0.28985.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 497 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:48.585,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.586,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.586,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{497, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.588,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.589,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.597,ns_1@10.242.238.88:<0.28979.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_499 [rebalance:info,2014-08-19T16:49:48.599,ns_1@10.242.238.88:<0.28979.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[499]}, {checkpoints,[{499,1}]}, {name,<<"rebalance_499">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[499]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"499"}]} [rebalance:info,2014-08-19T16:49:48.600,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 497 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.607,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 929. Nacking mccouch update. [views:debug,2014-08-19T16:49:48.607,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/929. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.607,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 497) [rebalance:debug,2014-08-19T16:49:48.607,ns_1@10.242.238.88:<0.28979.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.28995.0> [ns_server:debug,2014-08-19T16:49:48.607,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",929,active,0} [rebalance:info,2014-08-19T16:49:48.608,ns_1@10.242.238.88:<0.28996.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 501) [rebalance:info,2014-08-19T16:49:48.608,ns_1@10.242.238.88:<0.28997.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1016) [rebalance:info,2014-08-19T16:49:48.608,ns_1@10.242.238.88:<0.28998.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 503) [rebalance:info,2014-08-19T16:49:48.608,ns_1@10.242.238.88:<0.28979.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:49:48.608,ns_1@10.242.238.88:<0.28999.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1018) [ns_server:debug,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.29000.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 492) [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.29001.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 509) [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.29002.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 747) [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.26799.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:49:48.609,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,744,616,250,122,978,850,484,356,718,590,224,952,824,458, 330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951,704, 640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,742,678,614, 550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524,222,158, 950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001,988,924, 860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770,468,404, 340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650, 586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868, 502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342] [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.29003.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 507) [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.26572.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.29004.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1020) [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.29005.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 505) [rebalance:info,2014-08-19T16:49:48.609,ns_1@10.242.238.88:<0.27654.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.610,ns_1@10.242.238.88:<0.29006.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 496) [rebalance:info,2014-08-19T16:49:48.610,ns_1@10.242.238.88:<0.29007.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 494) [rebalance:info,2014-08-19T16:49:48.610,ns_1@10.242.238.88:<0.26007.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.610,ns_1@10.242.238.88:<0.29008.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 751) [rebalance:info,2014-08-19T16:49:48.610,ns_1@10.242.238.88:<0.29009.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 498) [rebalance:debug,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.28979.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.26198.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.27711.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29010.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 763) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29011.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 500) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29016.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 753) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29014.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 749) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29012.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 757) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.27316.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29017.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 755) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.28979.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successful takeover [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29015.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 759) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.29013.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 502) [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.26391.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.27260.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.27442.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.611,ns_1@10.242.238.88:<0.27078.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29018.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 765) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29019.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 748) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29020.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 752) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.26891.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29022.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 754) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29021.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 761) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29023.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 750) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29024.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 756) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29025.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1003) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29026.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 758) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.26177.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29027.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1009) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29028.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1007) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29029.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1005) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.26677.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.26968.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 499 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.29030.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1015) [rebalance:info,2014-08-19T16:49:48.612,ns_1@10.242.238.88:<0.27512.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29032.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1002) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29031.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1011) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29033.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1004) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29034.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1017) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29035.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1013) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.27148.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.26778.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29037.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1021) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29038.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1010) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29039.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1019) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29040.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1006) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29042.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1008) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.26543.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.29043.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1012) [rebalance:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.26412.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:49:48.613,ns_1@10.242.238.88:<0.26807.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_501_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.29046.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1014) [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.25981.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26233.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26947.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26580.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_503_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26799.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.27690.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.27616.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26572.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26042.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.27662.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_492_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26976.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.27239.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26355.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.27113.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.26015.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_509_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.27295.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.614,ns_1@10.242.238.88:<0.27654.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.27038.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.27477.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.26007.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.27407.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.26855.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.26503.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.26642.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:49:48.615,ns_1@10.242.238.88:<0.27767.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.616,ns_1@10.242.238.88:<0.26912.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.616,ns_1@10.242.238.88:<0.27587.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.616,ns_1@10.242.238.88:<0.26318.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:49:48.616,ns_1@10.242.238.88:<0.26206.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_507_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.26712.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.26198.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.25946.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.27003.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.27268.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_496_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.26142.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.27260.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.617,ns_1@10.242.238.88:<0.27372.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.26399.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_505_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.26391.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.27204.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.26834.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.26607.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.27086.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_498_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.27078.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.618,ns_1@10.242.238.88:<0.27450.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_494_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.619,ns_1@10.242.238.88:<0.27442.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.619,ns_1@10.242.238.88:<0.27324.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_751_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.619,ns_1@10.242.238.88:<0.27316.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.620,ns_1@10.242.238.88:<0.27719.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_747_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.620,ns_1@10.242.238.88:<0.27711.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.621,ns_1@10.242.238.88:<0.26899.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_500_'ns_1@10.242.238.89'">>] 
[rebalance:info,2014-08-19T16:49:48.621,ns_1@10.242.238.88:<0.26891.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.623,ns_1@10.242.238.88:<0.26685.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_502_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:49:48.623,ns_1@10.242.238.88:<0.26677.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.625,ns_1@10.242.238.88:<0.26185.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_763_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.625,ns_1@10.242.238.88:<0.26177.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.626,ns_1@10.242.238.88:<0.27520.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_749_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.626,ns_1@10.242.238.88:<0.27512.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.627,ns_1@10.242.238.88:<0.27156.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_753_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.627,ns_1@10.242.238.88:<0.27148.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.629,ns_1@10.242.238.88:<0.26786.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_757_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.629,ns_1@10.242.238.88:<0.26778.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.629,ns_1@10.242.238.88:<0.26551.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_759_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.630,ns_1@10.242.238.88:<0.26543.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.635,ns_1@10.242.238.88:<0.26428.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1016_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:48.635,ns_1@10.242.238.88:<0.27624.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_748_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.635,ns_1@10.242.238.88:<0.26412.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.635,ns_1@10.242.238.88:<0.27616.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.635,ns_1@10.242.238.88:<0.26955.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_755_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.635,ns_1@10.242.238.88:<0.26947.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:49:48.636,ns_1@10.242.238.88:<0.25989.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_765_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.636,ns_1@10.242.238.88:<0.25981.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.636,ns_1@10.242.238.88:<0.26241.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1018_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.636,ns_1@10.242.238.88:<0.26233.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.636,ns_1@10.242.238.88:<0.27698.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1003_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.636,ns_1@10.242.238.88:<0.27690.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.27046.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_754_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.27038.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.26050.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1020_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.26042.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.26363.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_761_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.26355.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.27247.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_752_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.637,ns_1@10.242.238.88:<0.27239.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.638,ns_1@10.242.238.88:<0.27121.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1009_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.638,ns_1@10.242.238.88:<0.27113.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.638,ns_1@10.242.238.88:<0.26863.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_756_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.638,ns_1@10.242.238.88:<0.26855.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.639,ns_1@10.242.238.88:<0.27415.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_750_'ns_1@10.242.238.90'">>] 
[rebalance:info,2014-08-19T16:49:48.639,ns_1@10.242.238.88:<0.27407.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.640,ns_1@10.242.238.88:<0.26976.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_499_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:48.640,ns_1@10.242.238.88:<0.26650.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_758_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.640,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 499 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.640,ns_1@10.242.238.88:<0.26642.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.640,ns_1@10.242.238.88:<0.29116.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 499 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:49:48.640,ns_1@10.242.238.88:<0.27485.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1005_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.641,ns_1@10.242.238.88:<0.27477.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.641,ns_1@10.242.238.88:<0.27303.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1007_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:48.641,ns_1@10.242.238.88:<0.26511.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1015_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.641,ns_1@10.242.238.88:<0.27295.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.641,ns_1@10.242.238.88:<0.26503.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.641,ns_1@10.242.238.88:<0.27775.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1002_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.641,ns_1@10.242.238.88:<0.27767.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.642,ns_1@10.242.238.88:<0.26920.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1011_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:48.642,ns_1@10.242.238.88:<0.27595.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1004_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.642,ns_1@10.242.238.88:<0.26912.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.642,ns_1@10.242.238.88:<0.27587.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.642,ns_1@10.242.238.88:<0.26326.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': 
[<<"replication_building_1017_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.642,ns_1@10.242.238.88:<0.26318.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.643,ns_1@10.242.238.88:<0.26720.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1013_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.643,ns_1@10.242.238.88:<0.26712.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.644,ns_1@10.242.238.88:<0.25954.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1021_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.644,ns_1@10.242.238.88:<0.25946.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.644,ns_1@10.242.238.88:<0.27011.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1010_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.644,ns_1@10.242.238.88:<0.27003.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:48.644,ns_1@10.242.238.88:<0.26572.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 503 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29079.0> [ns_server:info,2014-08-19T16:49:48.645,ns_1@10.242.238.88:<0.26150.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1019_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:48.645,ns_1@10.242.238.88:<0.29079.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 503 to state replica [ns_server:info,2014-08-19T16:49:48.646,ns_1@10.242.238.88:<0.27380.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1006_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.646,ns_1@10.242.238.88:<0.27372.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.645,ns_1@10.242.238.88:<0.26142.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:48.646,ns_1@10.242.238.88:<0.26799.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 501 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29080.0> [ns_server:info,2014-08-19T16:49:48.647,ns_1@10.242.238.88:<0.29080.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 501 to state replica [ns_server:info,2014-08-19T16:49:48.647,ns_1@10.242.238.88:<0.26842.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1012_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.647,ns_1@10.242.238.88:<0.26834.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:49:48.648,ns_1@10.242.238.88:<0.27654.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 492 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29081.0> [ns_server:info,2014-08-19T16:49:48.648,ns_1@10.242.238.88:<0.27212.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': 
[<<"replication_building_1008_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:48.648,ns_1@10.242.238.88:<0.26615.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1014_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.648,ns_1@10.242.238.88:<0.26007.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 509 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29082.0> [rebalance:info,2014-08-19T16:49:48.648,ns_1@10.242.238.88:<0.27204.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:49:48.650,ns_1@10.242.238.88:<0.26607.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:49:48.652,ns_1@10.242.238.88:<0.29081.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 492 to state replica [ns_server:info,2014-08-19T16:49:48.652,ns_1@10.242.238.88:<0.29082.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 509 to state replica [ns_server:debug,2014-08-19T16:49:48.659,ns_1@10.242.238.88:<0.26198.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 507 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29125.0> [ns_server:info,2014-08-19T16:49:48.660,ns_1@10.242.238.88:<0.29125.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 507 to state replica [ns_server:debug,2014-08-19T16:49:48.661,ns_1@10.242.238.88:<0.27260.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 496 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29135.0> [ns_server:info,2014-08-19T16:49:48.662,ns_1@10.242.238.88:<0.29135.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 496 to state replica [ns_server:debug,2014-08-19T16:49:48.663,ns_1@10.242.238.88:<0.26391.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 505 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29141.0> [ns_server:info,2014-08-19T16:49:48.664,ns_1@10.242.238.88:<0.29141.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 505 to state replica [ns_server:debug,2014-08-19T16:49:48.666,ns_1@10.242.238.88:<0.27442.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 494 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29148.0> [ns_server:debug,2014-08-19T16:49:48.667,ns_1@10.242.238.88:<0.27078.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 498 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29149.0> [ns_server:info,2014-08-19T16:49:48.668,ns_1@10.242.238.88:<0.29148.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 494 to state replica [ns_server:debug,2014-08-19T16:49:48.668,ns_1@10.242.238.88:<0.27316.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 751 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29150.0> [ns_server:info,2014-08-19T16:49:48.669,ns_1@10.242.238.88:<0.29149.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 498 to state replica [ns_server:debug,2014-08-19T16:49:48.669,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:48.669,ns_1@10.242.238.88:<0.29150.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 751 to state replica 
[ns_server:debug,2014-08-19T16:49:48.669,ns_1@10.242.238.88:<0.27711.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 747 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29151.0> [ns_server:debug,2014-08-19T16:49:48.670,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{499, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.678,ns_1@10.242.238.88:<0.26891.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 500 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29152.0> [ns_server:info,2014-08-19T16:49:48.678,ns_1@10.242.238.88:<0.29151.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 747 to state replica [ns_server:debug,2014-08-19T16:49:48.678,ns_1@10.242.238.88:<0.26677.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 502 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.29153.0> [ns_server:info,2014-08-19T16:49:48.679,ns_1@10.242.238.88:<0.29152.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 500 to state replica [ns_server:info,2014-08-19T16:49:48.679,ns_1@10.242.238.88:<0.29153.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 502 to state replica [ns_server:debug,2014-08-19T16:49:48.684,ns_1@10.242.238.88:<0.26177.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 763 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29154.0> [views:debug,2014-08-19T16:49:48.686,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/929. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.686,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",929,active,0} [ns_server:debug,2014-08-19T16:49:48.687,ns_1@10.242.238.88:<0.27512.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 749 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29155.0> [ns_server:info,2014-08-19T16:49:48.687,ns_1@10.242.238.88:<0.29154.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 763 to state replica [ns_server:info,2014-08-19T16:49:48.688,ns_1@10.242.238.88:<0.29155.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 749 to state replica [ns_server:debug,2014-08-19T16:49:48.692,ns_1@10.242.238.88:<0.27148.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 753 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29156.0> [ns_server:debug,2014-08-19T16:49:48.692,ns_1@10.242.238.88:<0.26543.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 759 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29157.0> [ns_server:debug,2014-08-19T16:49:48.692,ns_1@10.242.238.88:<0.26778.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 757 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29158.0> [ns_server:info,2014-08-19T16:49:48.697,ns_1@10.242.238.88:<0.29156.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 753 to state replica [ns_server:info,2014-08-19T16:49:48.697,ns_1@10.242.238.88:<0.29157.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 759 to state replica [ns_server:info,2014-08-19T16:49:48.697,ns_1@10.242.238.88:<0.29158.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 757 to state replica [ns_server:debug,2014-08-19T16:49:48.700,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:48.701,ns_1@10.242.238.88:<0.29079.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_503 [ns_server:debug,2014-08-19T16:49:48.702,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.27616.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 748 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29162.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.26412.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1016 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29163.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.26947.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 755 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29164.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.27690.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1003 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29166.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.25981.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 765 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29165.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.26233.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1018 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29167.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.26042.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1020 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29168.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.27038.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 754 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29169.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.26355.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 761 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29170.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.27239.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 752 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29171.0> [ns_server:debug,2014-08-19T16:49:48.703,ns_1@10.242.238.88:<0.26855.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 756 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29172.0> [ns_server:debug,2014-08-19T16:49:48.704,ns_1@10.242.238.88:<0.27407.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 750 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29173.0> [ns_server:debug,2014-08-19T16:49:48.704,ns_1@10.242.238.88:<0.27113.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1009 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29174.0> [ns_server:debug,2014-08-19T16:49:48.704,ns_1@10.242.238.88:<0.27295.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1007 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29175.0> [ns_server:debug,2014-08-19T16:49:48.704,ns_1@10.242.238.88:<0.26642.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 758 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.29176.0> 
[ns_server:debug,2014-08-19T16:49:48.708,ns_1@10.242.238.88:<0.27767.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1002 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29179.0> [ns_server:debug,2014-08-19T16:49:48.708,ns_1@10.242.238.88:<0.26912.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1011 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29180.0> [ns_server:debug,2014-08-19T16:49:48.708,ns_1@10.242.238.88:<0.27587.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1004 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29181.0> [ns_server:debug,2014-08-19T16:49:48.708,ns_1@10.242.238.88:<0.26318.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1017 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29182.0> [rebalance:info,2014-08-19T16:49:48.708,ns_1@10.242.238.88:<0.29079.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[503]}, {checkpoints,[{503,1}]}, {name,<<"rebalance_503">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[503]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"503"}]} [ns_server:debug,2014-08-19T16:49:48.708,ns_1@10.242.238.88:<0.26503.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1015 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29177.0> [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.88:<0.27477.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1005 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29178.0> [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.88:<0.27204.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1008 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29188.0> [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.88:<0.27372.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1006 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29184.0> [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.88:<0.26834.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1012 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29187.0> [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.88:<0.26712.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1013 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29183.0> [ns_server:debug,2014-08-19T16:49:48.709,ns_1@10.242.238.88:<0.26607.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1014 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29190.0> [ns_server:info,2014-08-19T16:49:48.709,ns_1@10.242.238.88:<0.29164.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 755 to state replica [ns_server:debug,2014-08-19T16:49:48.712,ns_1@10.242.238.88:<0.26142.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1019 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29185.0> [ns_server:debug,2014-08-19T16:49:48.713,ns_1@10.242.238.88:<0.27003.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1010 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29186.0> 
[ns_server:debug,2014-08-19T16:49:48.716,ns_1@10.242.238.88:<0.25946.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1021 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.29189.0> [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29174.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1009 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29171.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 752 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29169.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 754 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29163.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1016 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29173.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 750 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29172.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 756 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29168.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1020 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29182.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1017 to state replica [ns_server:info,2014-08-19T16:49:48.719,ns_1@10.242.238.88:<0.29179.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1002 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29166.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1003 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29180.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1011 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29167.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1018 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29176.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 758 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29165.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 765 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29181.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1004 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29175.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1007 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29170.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 761 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29162.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 748 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29183.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1013 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29190.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1014 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29184.0>:ebucketmigrator_srv:init:544]Setting 
{"10.242.238.91",11209} vbucket 1006 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29188.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1008 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29177.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1015 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29178.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1005 to state replica [ns_server:info,2014-08-19T16:49:48.720,ns_1@10.242.238.88:<0.29187.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1012 to state replica [ns_server:info,2014-08-19T16:49:48.721,ns_1@10.242.238.88:<0.29189.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1021 to state replica [ns_server:info,2014-08-19T16:49:48.721,ns_1@10.242.238.88:<0.29186.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1010 to state replica [rebalance:debug,2014-08-19T16:49:48.721,ns_1@10.242.238.88:<0.29079.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29191.0> [ns_server:info,2014-08-19T16:49:48.721,ns_1@10.242.238.88:<0.29185.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1019 to state replica [ns_server:debug,2014-08-19T16:49:48.723,ns_1@10.242.238.88:<0.29080.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_501 [rebalance:info,2014-08-19T16:49:48.727,ns_1@10.242.238.88:<0.29079.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:49:48.728,ns_1@10.242.238.88:<0.29080.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[501]}, {checkpoints,[{501,1}]}, {name,<<"rebalance_501">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[501]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"501"}]} [rebalance:debug,2014-08-19T16:49:48.728,ns_1@10.242.238.88:<0.29080.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29197.0> [rebalance:debug,2014-08-19T16:49:48.729,ns_1@10.242.238.88:<0.29079.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.729,ns_1@10.242.238.88:<0.29079.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.730,ns_1@10.242.238.88:<0.29080.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:49:48.730,ns_1@10.242.238.88:<0.26572.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 503 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.736,ns_1@10.242.238.88:<0.29080.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.736,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 499 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:49:48.736,ns_1@10.242.238.88:<0.26580.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.736,ns_1@10.242.238.88:<0.29080.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:48.737,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 499) [ns_server:debug,2014-08-19T16:49:48.737,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.738,ns_1@10.242.238.88:<0.26799.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 501 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.739,ns_1@10.242.238.88:<0.26807.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.741,ns_1@10.242.238.88:<0.26580.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_503_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.741,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 503 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.742,ns_1@10.242.238.88:<0.29202.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 503 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:48.743,ns_1@10.242.238.88:<0.29082.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_509 [ns_server:info,2014-08-19T16:49:48.743,ns_1@10.242.238.88:<0.26807.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_501_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.744,ns_1@10.242.238.88:<0.29082.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[509]}, {checkpoints,[{509,1}]}, {name,<<"rebalance_509">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[509]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"509"}]} [rebalance:debug,2014-08-19T16:49:48.745,ns_1@10.242.238.88:<0.29082.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29205.0> [rebalance:info,2014-08-19T16:49:48.745,ns_1@10.242.238.88:<0.29082.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.747,ns_1@10.242.238.88:<0.29082.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.747,ns_1@10.242.238.88:<0.29082.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.748,ns_1@10.242.238.88:<0.26007.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 509 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.749,ns_1@10.242.238.88:<0.26015.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.752,ns_1@10.242.238.88:<0.26015.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_509_'ns_1@10.242.238.91'">>] 
[ns_server:debug,2014-08-19T16:49:48.754,ns_1@10.242.238.88:<0.29148.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_494 [rebalance:info,2014-08-19T16:49:48.756,ns_1@10.242.238.88:<0.29148.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[494]}, {checkpoints,[{494,1}]}, {name,<<"rebalance_494">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[494]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"494"}]} [ns_server:debug,2014-08-19T16:49:48.756,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.757,ns_1@10.242.238.88:<0.29148.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29211.0> [ns_server:debug,2014-08-19T16:49:48.757,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.757,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{503, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.757,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.758,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:48.758,ns_1@10.242.238.88:<0.29148.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.761,ns_1@10.242.238.88:<0.29148.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.761,ns_1@10.242.238.88:<0.29148.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.762,ns_1@10.242.238.88:<0.27442.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 494 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.765,ns_1@10.242.238.88:<0.27450.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.765,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 503 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.766,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 503) [ns_server:debug,2014-08-19T16:49:48.767,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.767,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 501 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.768,ns_1@10.242.238.88:<0.29219.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 501 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:49:48.769,ns_1@10.242.238.88:<0.27450.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_494_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.772,ns_1@10.242.238.88:<0.29150.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_751 [rebalance:info,2014-08-19T16:49:48.774,ns_1@10.242.238.88:<0.29150.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[751]}, {checkpoints,[{751,1}]}, {name,<<"rebalance_751">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[751]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"751"}]} [rebalance:debug,2014-08-19T16:49:48.775,ns_1@10.242.238.88:<0.29150.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29236.0> [ns_server:debug,2014-08-19T16:49:48.775,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.776,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.776,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{501, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.778,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.781,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:49:48.781,ns_1@10.242.238.88:<0.29150.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.786,ns_1@10.242.238.88:<0.29150.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.786,ns_1@10.242.238.88:<0.29150.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:48.788,ns_1@10.242.238.88:<0.29141.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_505 [rebalance:info,2014-08-19T16:49:48.790,ns_1@10.242.238.88:<0.27316.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 751 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:48.790,ns_1@10.242.238.88:<0.29141.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[505]}, {checkpoints,[{505,1}]}, {name,<<"rebalance_505">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[505]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"505"}]} [rebalance:debug,2014-08-19T16:49:48.791,ns_1@10.242.238.88:<0.29141.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29244.0> [rebalance:debug,2014-08-19T16:49:48.791,ns_1@10.242.238.88:<0.27324.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.792,ns_1@10.242.238.88:<0.29141.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.793,ns_1@10.242.238.88:<0.29141.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.793,ns_1@10.242.238.88:<0.29141.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.795,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 501 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:49:48.796,ns_1@10.242.238.88:<0.26391.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 505 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:48.796,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 501) [ns_server:info,2014-08-19T16:49:48.797,ns_1@10.242.238.88:<0.27324.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_751_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.797,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.797,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 509 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.797,ns_1@10.242.238.88:<0.29250.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 509 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:49:48.799,ns_1@10.242.238.88:<0.26399.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.802,ns_1@10.242.238.88:<0.26399.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_505_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.808,ns_1@10.242.238.88:<0.29149.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_498 [rebalance:info,2014-08-19T16:49:48.809,ns_1@10.242.238.88:<0.29149.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[498]}, {checkpoints,[{498,1}]}, {name,<<"rebalance_498">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[498]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"498"}]} [rebalance:debug,2014-08-19T16:49:48.810,ns_1@10.242.238.88:<0.29149.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29253.0> [rebalance:info,2014-08-19T16:49:48.811,ns_1@10.242.238.88:<0.29149.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:48.811,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.812,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:48.812,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.812,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{509, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:48.812,ns_1@10.242.238.88:<0.29149.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:49:48.812,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:48.812,ns_1@10.242.238.88:<0.29149.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.813,ns_1@10.242.238.88:<0.27078.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 498 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.815,ns_1@10.242.238.88:<0.27086.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.819,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 509 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:info,2014-08-19T16:49:48.820,ns_1@10.242.238.88:<0.27086.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_498_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.820,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 509) [ns_server:debug,2014-08-19T16:49:48.821,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.821,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 494 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.821,ns_1@10.242.238.88:<0.29265.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 494 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:48.826,ns_1@10.242.238.88:<0.29135.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_496 [rebalance:info,2014-08-19T16:49:48.828,ns_1@10.242.238.88:<0.29135.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[496]}, {checkpoints,[{496,1}]}, {name,<<"rebalance_496">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[496]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"496"}]} [rebalance:debug,2014-08-19T16:49:48.829,ns_1@10.242.238.88:<0.29135.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29267.0> [rebalance:info,2014-08-19T16:49:48.830,ns_1@10.242.238.88:<0.29135.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[ns_server:debug,2014-08-19T16:49:48.833,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.833,ns_1@10.242.238.88:<0.29135.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.833,ns_1@10.242.238.88:<0.29135.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:48.834,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.834,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.834,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{494, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.834,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:48.835,ns_1@10.242.238.88:<0.27260.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 496 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.836,ns_1@10.242.238.88:<0.27268.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.841,ns_1@10.242.238.88:<0.27268.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_496_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.844,ns_1@10.242.238.88:<0.29081.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_492 [rebalance:info,2014-08-19T16:49:48.845,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 494 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:49:48.845,ns_1@10.242.238.88:<0.29081.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[492]}, {checkpoints,[{492,1}]}, {name,<<"rebalance_492">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[492]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"492"}]} [ns_server:debug,2014-08-19T16:49:48.846,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 494) [rebalance:debug,2014-08-19T16:49:48.846,ns_1@10.242.238.88:<0.29081.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29278.0> [rebalance:info,2014-08-19T16:49:48.846,ns_1@10.242.238.88:<0.29081.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:48.846,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.847,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 751 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:48.847,ns_1@10.242.238.88:<0.29280.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 751 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:49:48.848,ns_1@10.242.238.88:<0.29081.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.848,ns_1@10.242.238.88:<0.29081.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.849,ns_1@10.242.238.88:<0.27654.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 492 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.850,ns_1@10.242.238.88:<0.27662.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.856,ns_1@10.242.238.88:<0.27662.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_492_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.858,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 927. Nacking mccouch update. [views:debug,2014-08-19T16:49:48.858,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/927. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.858,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",927,active,0} [ns_server:debug,2014-08-19T16:49:48.859,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,742,678, 614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524,222, 158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001,988, 924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770,468, 404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342] [ns_server:debug,2014-08-19T16:49:48.860,ns_1@10.242.238.88:<0.29125.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_507 [rebalance:info,2014-08-19T16:49:48.862,ns_1@10.242.238.88:<0.29125.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[507]}, {checkpoints,[{507,1}]}, {name,<<"rebalance_507">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[507]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"507"}]} [rebalance:debug,2014-08-19T16:49:48.863,ns_1@10.242.238.88:<0.29125.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29284.0> [rebalance:info,2014-08-19T16:49:48.864,ns_1@10.242.238.88:<0.29125.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.865,ns_1@10.242.238.88:<0.29125.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.866,ns_1@10.242.238.88:<0.29125.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:49:48.867,ns_1@10.242.238.88:<0.26198.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 507 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:48.867,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.868,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.868,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.868,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.868,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{751, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:48.868,ns_1@10.242.238.88:<0.26206.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:48.871,ns_1@10.242.238.88:<0.26206.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_507_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.875,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 751 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.875,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 751) [ns_server:debug,2014-08-19T16:49:48.876,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.876,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 505 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.876,ns_1@10.242.238.88:<0.29296.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 505 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:48.883,ns_1@10.242.238.88:<0.29173.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_750 [rebalance:info,2014-08-19T16:49:48.885,ns_1@10.242.238.88:<0.29173.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[750]}, {checkpoints,[{750,1}]}, {name,<<"rebalance_750">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[750]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"750"}]} [ns_server:debug,2014-08-19T16:49:48.885,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.886,ns_1@10.242.238.88:<0.29173.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29299.0> [ns_server:debug,2014-08-19T16:49:48.886,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.886,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.886,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:48.886,ns_1@10.242.238.88:<0.29173.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:48.886,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{505, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:48.889,ns_1@10.242.238.88:<0.29173.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.889,ns_1@10.242.238.88:<0.29173.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.890,ns_1@10.242.238.88:<0.27407.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 750 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [views:debug,2014-08-19T16:49:48.892,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/927. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:48.892,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",927,active,0} [rebalance:debug,2014-08-19T16:49:48.893,ns_1@10.242.238.88:<0.27415.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.894,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 505 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.894,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 505) [ns_server:debug,2014-08-19T16:49:48.895,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.895,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 498 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.895,ns_1@10.242.238.88:<0.29309.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 498 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:49:48.896,ns_1@10.242.238.88:<0.27415.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_750_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.898,ns_1@10.242.238.88:<0.29156.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_753 [rebalance:info,2014-08-19T16:49:48.899,ns_1@10.242.238.88:<0.29156.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[753]}, {checkpoints,[{753,1}]}, {name,<<"rebalance_753">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[753]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"753"}]} [rebalance:debug,2014-08-19T16:49:48.900,ns_1@10.242.238.88:<0.29156.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29311.0> [rebalance:info,2014-08-19T16:49:48.901,ns_1@10.242.238.88:<0.29156.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.902,ns_1@10.242.238.88:<0.29156.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.902,ns_1@10.242.238.88:<0.29156.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.903,ns_1@10.242.238.88:<0.27148.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 753 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:48.904,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.904,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:48.904,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.905,ns_1@10.242.238.88:<0.27156.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.905,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.905,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{498, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:48.907,ns_1@10.242.238.88:<0.27156.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_753_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:48.912,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 498 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.913,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 498) [ns_server:debug,2014-08-19T16:49:48.914,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.914,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 496 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.914,ns_1@10.242.238.88:<0.29323.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 496 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:48.914,ns_1@10.242.238.88:<0.29186.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1010 [rebalance:info,2014-08-19T16:49:48.915,ns_1@10.242.238.88:<0.29186.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1010]}, {checkpoints,[{1010,1}]}, {name,<<"rebalance_1010">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1010]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1010"}]} [rebalance:debug,2014-08-19T16:49:48.917,ns_1@10.242.238.88:<0.29186.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29324.0> [rebalance:info,2014-08-19T16:49:48.917,ns_1@10.242.238.88:<0.29186.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.919,ns_1@10.242.238.88:<0.29186.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.919,ns_1@10.242.238.88:<0.29186.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.920,ns_1@10.242.238.88:<0.27003.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1010 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
[ns_server:debug,2014-08-19T16:49:48.921,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.921,ns_1@10.242.238.88:<0.27011.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.921,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.922,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.922,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.922,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{496, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:48.925,ns_1@10.242.238.88:<0.27011.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1010_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:48.933,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 496 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.934,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 496) [ns_server:debug,2014-08-19T16:49:48.935,ns_1@10.242.238.88:<0.29185.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1019 [ns_server:debug,2014-08-19T16:49:48.935,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.935,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 492 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.935,ns_1@10.242.238.88:<0.29337.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 492 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:49:48.936,ns_1@10.242.238.88:<0.29185.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1019]}, {checkpoints,[{1019,1}]}, {name,<<"rebalance_1019">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1019]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1019"}]} [rebalance:debug,2014-08-19T16:49:48.937,ns_1@10.242.238.88:<0.29185.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29338.0> [rebalance:info,2014-08-19T16:49:48.938,ns_1@10.242.238.88:<0.29185.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.939,ns_1@10.242.238.88:<0.29185.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.939,ns_1@10.242.238.88:<0.29185.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.940,ns_1@10.242.238.88:<0.26142.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1019 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:48.946,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.946,ns_1@10.242.238.88:<0.26150.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.947,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.947,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:48.948,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{492, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.948,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:48.950,ns_1@10.242.238.88:<0.26150.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1019_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:48.954,ns_1@10.242.238.88:<0.29183.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1013 [rebalance:info,2014-08-19T16:49:48.955,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 492 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 492) [rebalance:info,2014-08-19T16:49:48.956,ns_1@10.242.238.88:<0.29183.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1013]}, {checkpoints,[{1013,1}]}, {name,<<"rebalance_1013">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1013]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1013"}]} [ns_server:debug,2014-08-19T16:49:48.956,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:49:48.956,ns_1@10.242.238.88:<0.29183.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29350.0> [rebalance:info,2014-08-19T16:49:48.956,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 507 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:48.956,ns_1@10.242.238.88:<0.29352.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 507 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:49:48.957,ns_1@10.242.238.88:<0.29183.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.959,ns_1@10.242.238.88:<0.29183.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.959,ns_1@10.242.238.88:<0.29183.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.960,ns_1@10.242.238.88:<0.26712.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1013 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:48.964,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.965,ns_1@10.242.238.88:<0.26720.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:49:48.965,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:48.965,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.965,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{507, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:48.966,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:48.968,ns_1@10.242.238.88:<0.26720.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1013_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:48.970,ns_1@10.242.238.88:<0.29153.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_502 [rebalance:info,2014-08-19T16:49:48.972,ns_1@10.242.238.88:<0.29153.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[502]}, {checkpoints,[{502,1}]}, {name,<<"rebalance_502">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[502]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"502"}]} [rebalance:debug,2014-08-19T16:49:48.973,ns_1@10.242.238.88:<0.29153.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29376.0> [rebalance:info,2014-08-19T16:49:48.973,ns_1@10.242.238.88:<0.29153.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.975,ns_1@10.242.238.88:<0.29153.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.975,ns_1@10.242.238.88:<0.29153.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.976,ns_1@10.242.238.88:<0.26677.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 502 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:48.977,ns_1@10.242.238.88:<0.26685.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:48.978,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 507 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:48.979,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:48.979,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 750 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:48.980,ns_1@10.242.238.88:<0.29380.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 750 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:49:48.981,ns_1@10.242.238.88:<0.26685.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_502_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:48.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 507) [ns_server:debug,2014-08-19T16:49:48.985,ns_1@10.242.238.88:<0.29154.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_763 [rebalance:info,2014-08-19T16:49:48.987,ns_1@10.242.238.88:<0.29154.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[763]}, {checkpoints,[{763,1}]}, {name,<<"rebalance_763">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[763]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"763"}]} [rebalance:debug,2014-08-19T16:49:48.987,ns_1@10.242.238.88:<0.29154.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29383.0> [rebalance:info,2014-08-19T16:49:48.988,ns_1@10.242.238.88:<0.29154.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:48.991,ns_1@10.242.238.88:<0.29154.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:48.991,ns_1@10.242.238.88:<0.29154.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:48.992,ns_1@10.242.238.88:<0.26177.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 763 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:48.993,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:48.994,ns_1@10.242.238.88:<0.26185.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:48.994,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:48.994,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.994,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:48.994,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{750, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:48.998,ns_1@10.242.238.88:<0.26185.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_763_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:49.000,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 750 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.000,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 750) [ns_server:debug,2014-08-19T16:49:49.001,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.001,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 753 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.001,ns_1@10.242.238.88:<0.29395.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 753 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.008,ns_1@10.242.238.88:<0.29184.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1006 [rebalance:info,2014-08-19T16:49:49.010,ns_1@10.242.238.88:<0.29184.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1006]}, {checkpoints,[{1006,1}]}, {name,<<"rebalance_1006">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1006]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1006"}]} [rebalance:debug,2014-08-19T16:49:49.010,ns_1@10.242.238.88:<0.29184.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29396.0> [rebalance:info,2014-08-19T16:49:49.011,ns_1@10.242.238.88:<0.29184.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.013,ns_1@10.242.238.88:<0.29184.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.013,ns_1@10.242.238.88:<0.29184.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.014,ns_1@10.242.238.88:<0.27372.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1006 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.024,ns_1@10.242.238.88:<0.29152.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_500 
[rebalance:info,2014-08-19T16:49:49.026,ns_1@10.242.238.88:<0.29152.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[500]}, {checkpoints,[{500,1}]}, {name,<<"rebalance_500">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[500]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"500"}]} [rebalance:debug,2014-08-19T16:49:49.027,ns_1@10.242.238.88:<0.29152.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29398.0> [rebalance:info,2014-08-19T16:49:49.028,ns_1@10.242.238.88:<0.29152.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.030,ns_1@10.242.238.88:<0.29152.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.030,ns_1@10.242.238.88:<0.29152.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.031,ns_1@10.242.238.88:<0.26891.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 500 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.032,ns_1@10.242.238.88:<0.26899.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.036,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.036,ns_1@10.242.238.88:<0.27380.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.037,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{753, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.037,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.037,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.038,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:49.039,ns_1@10.242.238.88:<0.26899.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_500_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:49.039,ns_1@10.242.238.88:<0.27380.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1006_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.041,ns_1@10.242.238.88:<0.29188.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1008 [rebalance:info,2014-08-19T16:49:49.043,ns_1@10.242.238.88:<0.29188.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1008]}, {checkpoints,[{1008,1}]}, {name,<<"rebalance_1008">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1008]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1008"}]} [rebalance:info,2014-08-19T16:49:49.044,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 753 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:49:49.044,ns_1@10.242.238.88:<0.29188.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29411.0> [ns_server:debug,2014-08-19T16:49:49.044,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 753) [rebalance:info,2014-08-19T16:49:49.045,ns_1@10.242.238.88:<0.29188.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:49.046,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.046,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1010 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.046,ns_1@10.242.238.88:<0.29413.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1010 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:49:49.047,ns_1@10.242.238.88:<0.29188.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.047,ns_1@10.242.238.88:<0.29188.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.048,ns_1@10.242.238.88:<0.27204.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1008 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.050,ns_1@10.242.238.88:<0.27212.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.053,ns_1@10.242.238.88:<0.27212.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1008_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.059,ns_1@10.242.238.88:<0.29163.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1016 
[ns_server:debug,2014-08-19T16:49:49.060,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 925. Nacking mccouch update. [views:debug,2014-08-19T16:49:49.060,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/925. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.060,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",925,active,0} [rebalance:info,2014-08-19T16:49:49.060,ns_1@10.242.238.88:<0.29163.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1016]}, {checkpoints,[{1016,1}]}, {name,<<"rebalance_1016">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1016]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1016"}]} [rebalance:debug,2014-08-19T16:49:49.061,ns_1@10.242.238.88:<0.29163.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29417.0> [rebalance:info,2014-08-19T16:49:49.062,ns_1@10.242.238.88:<0.29163.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:49.062,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,925,742, 678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001, 988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770, 468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560, 194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342] 
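The "Usable vbuckets" dump above is a flat listing of the vbucket ids this node currently tracks for the default bucket, and the buckets config entries in the same stream show the bucket is defined with {num_vbuckets,1024}. A minimal sketch for checking which of the 1024 ids are absent from such a listing, assuming the dump text is copied out of the log by hand (the helper below is not part of Couchbase and its names are made up):

#!/usr/bin/env python3
# Hypothetical helper: given the text of one "Usable vbuckets:" dump copied from
# a log like the one above, report how many of the expected vbuckets are missing.
import re

def missing_vbuckets(dump_text, num_vbuckets=1024):
    # Every number in the dump is a vbucket id; compare against 0..num_vbuckets-1.
    listed = {int(n) for n in re.findall(r"\d+", dump_text)}
    return sorted(set(range(num_vbuckets)) - listed)

if __name__ == "__main__":
    sample = "933,750,622,128,984,856,490,362"   # paste a full "Usable vbuckets" dump here
    missing = missing_vbuckets(sample)
    print(len(missing), "of 1024 vbuckets not listed")
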
[ns_server:debug,2014-08-19T16:49:49.066,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.066,ns_1@10.242.238.88:<0.29163.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.066,ns_1@10.242.238.88:<0.29163.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:49.066,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.067,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.067,ns_1@10.242.238.88:<0.26412.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1016 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.067,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1010, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.067,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.069,ns_1@10.242.238.88:<0.26428.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.072,ns_1@10.242.238.88:<0.26428.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1016_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:49.074,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1010 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.075,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1010) [ns_server:debug,2014-08-19T16:49:49.076,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.077,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1019 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.077,ns_1@10.242.238.88:<0.29429.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1019 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.081,ns_1@10.242.238.88:<0.29157.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_759 [rebalance:info,2014-08-19T16:49:49.083,ns_1@10.242.238.88:<0.29157.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[759]}, {checkpoints,[{759,1}]}, {name,<<"rebalance_759">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[759]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"759"}]} [rebalance:debug,2014-08-19T16:49:49.084,ns_1@10.242.238.88:<0.29157.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29431.0> [rebalance:info,2014-08-19T16:49:49.085,ns_1@10.242.238.88:<0.29157.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.087,ns_1@10.242.238.88:<0.29157.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.087,ns_1@10.242.238.88:<0.29157.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.089,ns_1@10.242.238.88:<0.26543.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 759 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.094,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.094,ns_1@10.242.238.88:<0.26551.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.094,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.094,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.095,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1019, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.095,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:49.097,ns_1@10.242.238.88:<0.26551.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_759_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.100,ns_1@10.242.238.88:<0.29177.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1015 [rebalance:info,2014-08-19T16:49:49.101,ns_1@10.242.238.88:<0.29177.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1015]}, {checkpoints,[{1015,1}]}, {name,<<"rebalance_1015">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1015]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1015"}]} [rebalance:debug,2014-08-19T16:49:49.102,ns_1@10.242.238.88:<0.29177.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29441.0> [rebalance:info,2014-08-19T16:49:49.103,ns_1@10.242.238.88:<0.29177.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.104,ns_1@10.242.238.88:<0.29177.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.104,ns_1@10.242.238.88:<0.29177.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.105,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1019 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:49:49.105,ns_1@10.242.238.88:<0.26503.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1015 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.105,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1019) [ns_server:debug,2014-08-19T16:49:49.106,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.106,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1013 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.106,ns_1@10.242.238.88:<0.29444.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1013 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:49:49.106,ns_1@10.242.238.88:<0.26511.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.111,ns_1@10.242.238.88:<0.26511.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1015_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.115,ns_1@10.242.238.88:<0.29164.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_755 [rebalance:info,2014-08-19T16:49:49.117,ns_1@10.242.238.88:<0.29164.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[755]}, {checkpoints,[{755,1}]}, {name,<<"rebalance_755">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[755]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"755"}]} [rebalance:debug,2014-08-19T16:49:49.118,ns_1@10.242.238.88:<0.29164.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29448.0> [rebalance:info,2014-08-19T16:49:49.118,ns_1@10.242.238.88:<0.29164.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [views:debug,2014-08-19T16:49:49.119,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/925. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.119,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",925,active,0} [rebalance:debug,2014-08-19T16:49:49.121,ns_1@10.242.238.88:<0.29164.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.121,ns_1@10.242.238.88:<0.29164.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.122,ns_1@10.242.238.88:<0.26947.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 755 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.122,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.123,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.123,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.123,ns_1@10.242.238.88:<0.26955.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.123,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.123,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1013, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:49:49.127,ns_1@10.242.238.88:<0.26955.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_755_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:49.134,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1013 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.134,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1013) [ns_server:debug,2014-08-19T16:49:49.135,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.135,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 502 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:49.135,ns_1@10.242.238.88:<0.29460.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 502 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:49:49.136,ns_1@10.242.238.88:<0.29176.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_758 [rebalance:info,2014-08-19T16:49:49.137,ns_1@10.242.238.88:<0.29176.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[758]}, {checkpoints,[{758,1}]}, {name,<<"rebalance_758">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[758]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"758"}]} [rebalance:debug,2014-08-19T16:49:49.138,ns_1@10.242.238.88:<0.29176.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29461.0> [rebalance:info,2014-08-19T16:49:49.138,ns_1@10.242.238.88:<0.29176.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.140,ns_1@10.242.238.88:<0.29176.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.140,ns_1@10.242.238.88:<0.29176.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.141,ns_1@10.242.238.88:<0.26642.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 758 state change: {'ns_1@10.242.238.90',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:49:49.143,ns_1@10.242.238.88:<0.26650.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.148,ns_1@10.242.238.88:<0.26650.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_758_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.149,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.150,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.150,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{502, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.151,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.151,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.155,ns_1@10.242.238.88:<0.29162.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_748 [ns_server:info,2014-08-19T16:49:49.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [rebalance:info,2014-08-19T16:49:49.156,ns_1@10.242.238.88:<0.29162.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[748]}, {checkpoints,[{748,1}]}, {name,<<"rebalance_748">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[748]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"748"}]} [rebalance:debug,2014-08-19T16:49:49.157,ns_1@10.242.238.88:<0.29162.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29477.0> [rebalance:info,2014-08-19T16:49:49.158,ns_1@10.242.238.88:<0.29162.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.160,ns_1@10.242.238.88:<0.29162.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.160,ns_1@10.242.238.88:<0.29162.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.161,ns_1@10.242.238.88:<0.27616.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 748 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:49.162,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 502 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.162,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 502) [ns_server:debug,2014-08-19T16:49:49.163,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.163,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 763 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.163,ns_1@10.242.238.88:<0.29480.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 763 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:49:49.169,ns_1@10.242.238.88:<0.27624.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.170,ns_1@10.242.238.88:<0.29178.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1005 [rebalance:info,2014-08-19T16:49:49.172,ns_1@10.242.238.88:<0.29178.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1005]}, {checkpoints,[{1005,1}]}, {name,<<"rebalance_1005">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1005]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1005"}]} [ns_server:info,2014-08-19T16:49:49.172,ns_1@10.242.238.88:<0.27624.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_748_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.172,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.173,ns_1@10.242.238.88:<0.29178.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29485.0> [ns_server:debug,2014-08-19T16:49:49.174,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.174,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:49:49.174,ns_1@10.242.238.88:<0.29178.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:49.175,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{763, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.175,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.176,ns_1@10.242.238.88:<0.29178.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.176,ns_1@10.242.238.88:<0.29178.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.177,ns_1@10.242.238.88:<0.27477.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1005 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.179,ns_1@10.242.238.88:<0.27485.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:49.182,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 763 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.183,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 763) [ns_server:debug,2014-08-19T16:49:49.184,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.184,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 500 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:49:49.184,ns_1@10.242.238.88:<0.29494.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 500 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:49:49.185,ns_1@10.242.238.88:<0.27485.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1005_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.197,ns_1@10.242.238.88:<0.29181.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1004 [rebalance:info,2014-08-19T16:49:49.199,ns_1@10.242.238.88:<0.29181.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1004]}, {checkpoints,[{1004,1}]}, {name,<<"rebalance_1004">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1004]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1004"}]} [rebalance:debug,2014-08-19T16:49:49.200,ns_1@10.242.238.88:<0.29181.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29505.0> [rebalance:info,2014-08-19T16:49:49.201,ns_1@10.242.238.88:<0.29181.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:49:49.202,ns_1@10.242.238.88:<0.29181.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.203,ns_1@10.242.238.88:<0.29181.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.203,ns_1@10.242.238.88:<0.27587.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1004 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.204,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.205,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{500, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.206,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.206,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.206,ns_1@10.242.238.88:<0.27595.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.208,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.214,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 500 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.215,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 500) [ns_server:info,2014-08-19T16:49:49.215,ns_1@10.242.238.88:<0.27595.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1004_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.216,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.216,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1006 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.216,ns_1@10.242.238.88:<0.29524.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1006 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.218,ns_1@10.242.238.88:<0.29155.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_749 [rebalance:info,2014-08-19T16:49:49.220,ns_1@10.242.238.88:<0.29155.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[749]}, {checkpoints,[{749,1}]}, {name,<<"rebalance_749">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[749]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"749"}]} [rebalance:debug,2014-08-19T16:49:49.220,ns_1@10.242.238.88:<0.29155.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29525.0> [rebalance:info,2014-08-19T16:49:49.221,ns_1@10.242.238.88:<0.29155.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.223,ns_1@10.242.238.88:<0.29155.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.224,ns_1@10.242.238.88:<0.29155.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.227,ns_1@10.242.238.88:<0.27512.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 749 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.232,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.233,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.233,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.233,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.234,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1006, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.236,ns_1@10.242.238.88:<0.29182.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1017 [rebalance:info,2014-08-19T16:49:49.238,ns_1@10.242.238.88:<0.29182.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1017]}, {checkpoints,[{1017,1}]}, {name,<<"rebalance_1017">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1017]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1017"}]} [rebalance:debug,2014-08-19T16:49:49.238,ns_1@10.242.238.88:<0.29182.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29533.0> [rebalance:info,2014-08-19T16:49:49.239,ns_1@10.242.238.88:<0.29182.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.241,ns_1@10.242.238.88:<0.29182.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.241,ns_1@10.242.238.88:<0.29182.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.242,ns_1@10.242.238.88:<0.26318.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1017 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.244,ns_1@10.242.238.88:<0.26326.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.251,ns_1@10.242.238.88:<0.29158.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_757 [rebalance:info,2014-08-19T16:49:49.252,ns_1@10.242.238.88:<0.29158.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[757]}, {checkpoints,[{757,1}]}, {name,<<"rebalance_757">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[757]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"757"}]} [rebalance:debug,2014-08-19T16:49:49.253,ns_1@10.242.238.88:<0.29158.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29535.0> [rebalance:info,2014-08-19T16:49:49.253,ns_1@10.242.238.88:<0.29158.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.255,ns_1@10.242.238.88:<0.29158.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.255,ns_1@10.242.238.88:<0.29158.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:49:49.255,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1006 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:49:49.256,ns_1@10.242.238.88:<0.27520.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:49.256,ns_1@10.242.238.88:<0.26778.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 757 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.258,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:49:49.258,ns_1@10.242.238.88:<0.26786.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.258,ns_1@10.242.238.88:<0.26326.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1017_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:49.258,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1008 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.258,ns_1@10.242.238.88:<0.29540.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1008 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.259,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1006) [ns_server:info,2014-08-19T16:49:49.260,ns_1@10.242.238.88:<0.27520.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_749_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:49:49.261,ns_1@10.242.238.88:<0.26786.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_757_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.268,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.269,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.269,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.269,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1008, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.270,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.270,ns_1@10.242.238.88:<0.29175.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1007 [rebalance:info,2014-08-19T16:49:49.272,ns_1@10.242.238.88:<0.29175.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1007]}, {checkpoints,[{1007,1}]}, {name,<<"rebalance_1007">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1007]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1007"}]} [rebalance:debug,2014-08-19T16:49:49.273,ns_1@10.242.238.88:<0.29175.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29547.0> [rebalance:info,2014-08-19T16:49:49.278,ns_1@10.242.238.88:<0.29175.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.281,ns_1@10.242.238.88:<0.29175.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.281,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1008 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:49:49.281,ns_1@10.242.238.88:<0.29175.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:49.282,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1008) [rebalance:info,2014-08-19T16:49:49.282,ns_1@10.242.238.88:<0.27295.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1007 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.283,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.283,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1016 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.283,ns_1@10.242.238.88:<0.29556.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1016 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:49:49.284,ns_1@10.242.238.88:<0.27303.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 923. Nacking mccouch update. 
[views:debug,2014-08-19T16:49:49.287,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/923. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",923,active,0} [ns_server:info,2014-08-19T16:49:49.287,ns_1@10.242.238.88:<0.27303.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1007_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.290,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,925,742, 678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001, 988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770, 468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,923,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997, 686,558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140, 996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450, 322,995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760, 632,138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942, 814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342] [ns_server:debug,2014-08-19T16:49:49.293,ns_1@10.242.238.88:<0.29169.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_754 [rebalance:info,2014-08-19T16:49:49.295,ns_1@10.242.238.88:<0.29169.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[754]}, {checkpoints,[{754,1}]}, {name,<<"rebalance_754">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[754]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"754"}]} [ns_server:debug,2014-08-19T16:49:49.295,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing 
replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.296,ns_1@10.242.238.88:<0.29169.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29561.0> [ns_server:debug,2014-08-19T16:49:49.296,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.296,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.297,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.297,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1016, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:49:49.299,ns_1@10.242.238.88:<0.29169.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.301,ns_1@10.242.238.88:<0.29169.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.301,ns_1@10.242.238.88:<0.29169.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.302,ns_1@10.242.238.88:<0.27038.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 754 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:49.302,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1016 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.303,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1016) [ns_server:debug,2014-08-19T16:49:49.304,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:49:49.304,ns_1@10.242.238.88:<0.27046.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:49.304,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 759 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.304,ns_1@10.242.238.88:<0.29570.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 759 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:49:49.307,ns_1@10.242.238.88:<0.27046.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_754_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.309,ns_1@10.242.238.88:<0.29165.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_765 [rebalance:info,2014-08-19T16:49:49.312,ns_1@10.242.238.88:<0.29165.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[765]}, {checkpoints,[{765,1}]}, {name,<<"rebalance_765">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[765]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"765"}]} [rebalance:debug,2014-08-19T16:49:49.312,ns_1@10.242.238.88:<0.29165.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29573.0> [rebalance:info,2014-08-19T16:49:49.313,ns_1@10.242.238.88:<0.29165.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.315,ns_1@10.242.238.88:<0.29165.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.315,ns_1@10.242.238.88:<0.29165.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.316,ns_1@10.242.238.88:<0.25981.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 765 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.316,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.317,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.317,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.317,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{759, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.317,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.317,ns_1@10.242.238.88:<0.25989.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.320,ns_1@10.242.238.88:<0.25989.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_765_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.326,ns_1@10.242.238.88:<0.29151.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_747 [rebalance:info,2014-08-19T16:49:49.327,ns_1@10.242.238.88:<0.29151.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[747]}, {checkpoints,[{747,1}]}, {name,<<"rebalance_747">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[747]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"747"}]} [rebalance:debug,2014-08-19T16:49:49.327,ns_1@10.242.238.88:<0.29151.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29583.0> [rebalance:info,2014-08-19T16:49:49.330,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 759 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.331,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 759) [ns_server:debug,2014-08-19T16:49:49.333,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.333,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1015 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.333,ns_1@10.242.238.88:<0.29586.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1015 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:49:49.333,ns_1@10.242.238.88:<0.29151.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.335,ns_1@10.242.238.88:<0.29151.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.335,ns_1@10.242.238.88:<0.29151.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.336,ns_1@10.242.238.88:<0.27711.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 747 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.342,ns_1@10.242.238.88:<0.29171.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_752 [rebalance:info,2014-08-19T16:49:49.344,ns_1@10.242.238.88:<0.29171.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[752]}, {checkpoints,[{752,1}]}, {name,<<"rebalance_752">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[752]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"752"}]} [rebalance:debug,2014-08-19T16:49:49.345,ns_1@10.242.238.88:<0.29171.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29588.0> [views:debug,2014-08-19T16:49:49.346,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/923. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.346,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",923,active,0} [rebalance:info,2014-08-19T16:49:49.346,ns_1@10.242.238.88:<0.29171.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.348,ns_1@10.242.238.88:<0.27719.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.349,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.349,ns_1@10.242.238.88:<0.29171.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.350,ns_1@10.242.238.88:<0.29171.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:49.350,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.350,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1015, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.350,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.350,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.350,ns_1@10.242.238.88:<0.27239.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 752 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:info,2014-08-19T16:49:49.351,ns_1@10.242.238.88:<0.27719.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_747_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:49:49.352,ns_1@10.242.238.88:<0.27247.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.355,ns_1@10.242.238.88:<0.27247.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_752_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:49:49.357,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1015 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.358,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1015) [ns_server:debug,2014-08-19T16:49:49.359,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.359,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 755 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.359,ns_1@10.242.238.88:<0.29602.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 755 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.360,ns_1@10.242.238.88:<0.29167.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1018 [rebalance:info,2014-08-19T16:49:49.361,ns_1@10.242.238.88:<0.29167.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1018]}, {checkpoints,[{1018,1}]}, {name,<<"rebalance_1018">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1018]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1018"}]} [rebalance:debug,2014-08-19T16:49:49.362,ns_1@10.242.238.88:<0.29167.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29603.0> [rebalance:info,2014-08-19T16:49:49.363,ns_1@10.242.238.88:<0.29167.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.364,ns_1@10.242.238.88:<0.29167.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.364,ns_1@10.242.238.88:<0.29167.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.368,ns_1@10.242.238.88:<0.26233.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1018 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.373,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.373,ns_1@10.242.238.88:<0.26241.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{755, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.88:<0.29190.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1014 [ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.374,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.375,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.376,ns_1@10.242.238.88:<0.29190.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1014]}, {checkpoints,[{1014,1}]}, {name,<<"rebalance_1014">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1014]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1014"}]} [rebalance:debug,2014-08-19T16:49:49.377,ns_1@10.242.238.88:<0.29190.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29609.0> [ns_server:info,2014-08-19T16:49:49.377,ns_1@10.242.238.88:<0.26241.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1018_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:49.378,ns_1@10.242.238.88:<0.29190.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.380,ns_1@10.242.238.88:<0.29190.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.380,ns_1@10.242.238.88:<0.29190.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.381,ns_1@10.242.238.88:<0.26607.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1014 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:49.382,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 755 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.383,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 755) [rebalance:debug,2014-08-19T16:49:49.383,ns_1@10.242.238.88:<0.26615.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.384,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.384,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 758 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.384,ns_1@10.242.238.88:<0.29617.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 758 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:49:49.386,ns_1@10.242.238.88:<0.26615.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1014_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.391,ns_1@10.242.238.88:<0.29170.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_761 [ns_server:debug,2014-08-19T16:49:49.393,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.393,ns_1@10.242.238.88:<0.29170.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[761]}, {checkpoints,[{761,1}]}, {name,<<"rebalance_761">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[761]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"761"}]} [ns_server:debug,2014-08-19T16:49:49.394,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.394,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{758, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.394,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.394,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:debug,2014-08-19T16:49:49.394,ns_1@10.242.238.88:<0.29170.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29622.0> [rebalance:info,2014-08-19T16:49:49.395,ns_1@10.242.238.88:<0.29170.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.397,ns_1@10.242.238.88:<0.29170.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.397,ns_1@10.242.238.88:<0.29170.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.399,ns_1@10.242.238.88:<0.26355.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 761 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:49.401,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 758 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:49:49.401,ns_1@10.242.238.88:<0.26363.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.401,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 758) [ns_server:debug,2014-08-19T16:49:49.402,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.402,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 748 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.402,ns_1@10.242.238.88:<0.29631.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 748 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:49:49.405,ns_1@10.242.238.88:<0.26363.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_761_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.413,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.414,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{748, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.414,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.415,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.415,ns_1@10.242.238.88:<0.29189.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1021 [rebalance:info,2014-08-19T16:49:49.416,ns_1@10.242.238.88:<0.29189.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1021]}, {checkpoints,[{1021,1}]}, {name,<<"rebalance_1021">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1021]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1021"}]} [rebalance:debug,2014-08-19T16:49:49.417,ns_1@10.242.238.88:<0.29189.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29636.0> [rebalance:info,2014-08-19T16:49:49.418,ns_1@10.242.238.88:<0.29189.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.419,ns_1@10.242.238.88:<0.29189.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.419,ns_1@10.242.238.88:<0.29189.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.420,ns_1@10.242.238.88:<0.25946.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1021 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.422,ns_1@10.242.238.88:<0.25954.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:49.424,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 748 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.424,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 748) [ns_server:debug,2014-08-19T16:49:49.425,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.425,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1005 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.426,ns_1@10.242.238.88:<0.29644.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1005 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.427,ns_1@10.242.238.88:<0.29174.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1009 [rebalance:info,2014-08-19T16:49:49.428,ns_1@10.242.238.88:<0.29174.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1009]}, {checkpoints,[{1009,1}]}, {name,<<"rebalance_1009">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1009]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1009"}]} [rebalance:debug,2014-08-19T16:49:49.429,ns_1@10.242.238.88:<0.29174.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29646.0> [ns_server:info,2014-08-19T16:49:49.430,ns_1@10.242.238.88:<0.25954.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1021_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:49.431,ns_1@10.242.238.88:<0.29174.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:49:49.432,ns_1@10.242.238.88:<0.29174.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.432,ns_1@10.242.238.88:<0.29174.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.433,ns_1@10.242.238.88:<0.27113.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1009 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.435,ns_1@10.242.238.88:<0.27121.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.440,ns_1@10.242.238.88:<0.27121.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1009_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.442,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.443,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.443,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.443,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.444,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1005, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.447,ns_1@10.242.238.88:<0.29166.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1003 [rebalance:info,2014-08-19T16:49:49.449,ns_1@10.242.238.88:<0.29166.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1003]}, {checkpoints,[{1003,1}]}, {name,<<"rebalance_1003">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1003]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1003"}]} [rebalance:debug,2014-08-19T16:49:49.449,ns_1@10.242.238.88:<0.29166.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29672.0> [rebalance:info,2014-08-19T16:49:49.450,ns_1@10.242.238.88:<0.29166.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:49:49.451,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1005 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:49:49.452,ns_1@10.242.238.88:<0.29166.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.452,ns_1@10.242.238.88:<0.29166.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:49.452,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1005) [rebalance:info,2014-08-19T16:49:49.453,ns_1@10.242.238.88:<0.27690.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1003 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.453,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.453,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1004 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.453,ns_1@10.242.238.88:<0.29675.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1004 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:49:49.454,ns_1@10.242.238.88:<0.27698.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.457,ns_1@10.242.238.88:<0.27698.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1003_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.462,ns_1@10.242.238.88:<0.29187.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1012 [rebalance:info,2014-08-19T16:49:49.463,ns_1@10.242.238.88:<0.29187.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1012]}, {checkpoints,[{1012,1}]}, {name,<<"rebalance_1012">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1012]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1012"}]} [ns_server:debug,2014-08-19T16:49:49.464,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.464,ns_1@10.242.238.88:<0.29187.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29681.0> [ns_server:debug,2014-08-19T16:49:49.465,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1004, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.465,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:49:49.466,ns_1@10.242.238.88:<0.29187.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:49:49.466,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.467,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:49:49.468,ns_1@10.242.238.88:<0.29187.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.469,ns_1@10.242.238.88:<0.29187.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.471,ns_1@10.242.238.88:<0.26834.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1012 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.473,ns_1@10.242.238.88:<0.26842.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:49:49.477,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1004 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.478,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1004) [ns_server:debug,2014-08-19T16:49:49.479,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:info,2014-08-19T16:49:49.479,ns_1@10.242.238.88:<0.26842.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1012_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:49.479,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1017 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.479,ns_1@10.242.238.88:<0.29691.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1017 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.481,ns_1@10.242.238.88:<0.29168.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1020 [rebalance:info,2014-08-19T16:49:49.483,ns_1@10.242.238.88:<0.29168.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1020]}, {checkpoints,[{1020,1}]}, {name,<<"rebalance_1020">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1020]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1020"}]} [rebalance:debug,2014-08-19T16:49:49.484,ns_1@10.242.238.88:<0.29168.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29692.0> [rebalance:info,2014-08-19T16:49:49.484,ns_1@10.242.238.88:<0.29168.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.486,ns_1@10.242.238.88:<0.29168.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.486,ns_1@10.242.238.88:<0.29168.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.486,ns_1@10.242.238.88:<0.26042.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1020 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:49:49.488,ns_1@10.242.238.88:<0.26050.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.491,ns_1@10.242.238.88:<0.26050.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1020_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.500,ns_1@10.242.238.88:<0.29180.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1011 [rebalance:info,2014-08-19T16:49:49.501,ns_1@10.242.238.88:<0.29180.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1011]}, {checkpoints,[{1011,1}]}, {name,<<"rebalance_1011">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1011]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1011"}]} [rebalance:debug,2014-08-19T16:49:49.502,ns_1@10.242.238.88:<0.29180.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29696.0> [rebalance:info,2014-08-19T16:49:49.503,ns_1@10.242.238.88:<0.29180.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.504,ns_1@10.242.238.88:<0.29180.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:49:49.505,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.505,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 921. Nacking mccouch update. [rebalance:info,2014-08-19T16:49:49.505,ns_1@10.242.238.88:<0.29180.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [views:debug,2014-08-19T16:49:49.505,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/921. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.505,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",921,active,0} [ns_server:debug,2014-08-19T16:49:49.505,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:49:49.506,ns_1@10.242.238.88:<0.26912.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1011 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:49:49.506,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1017, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:49:49.507,ns_1@10.242.238.88:<0.26920.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.507,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 704,640,576,512,210,146,1015,938,874,810,508,444,380,316,1002,989,925,742, 678,614,550,248,184,120,976,912,848,784,482,418,354,290,963,716,652,588,524, 222,158,950,886,822,456,392,328,264,1014,937,754,690,626,562,196,132,1001, 988,924,860,796,494,430,366,302,975,728,664,600,536,234,170,962,898,834,770, 468,404,340,276,949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,923,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,921,738,674,610,546,244,180,116,972,908,844,780,478,414, 350,286,959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 997,686,558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634, 140,996,868,502,374,736,608,242,114,970,842,476,348,710,582,216,1021,944,816, 450,322,995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943, 760,632,138,994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019, 942,814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268, 1018,941,758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578, 212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342] [ns_server:debug,2014-08-19T16:49:49.509,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:49:49.510,ns_1@10.242.238.88:<0.26920.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 
'ns_1@10.242.238.88': [<<"replication_building_1011_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:49:49.511,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.513,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1017 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.514,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1017) [ns_server:debug,2014-08-19T16:49:49.515,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.515,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 749 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.515,ns_1@10.242.238.88:<0.29708.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 749 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.518,ns_1@10.242.238.88:<0.29179.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1002 [rebalance:info,2014-08-19T16:49:49.519,ns_1@10.242.238.88:<0.29179.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1002]}, {checkpoints,[{1002,1}]}, {name,<<"rebalance_1002">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1002]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1002"}]} [rebalance:debug,2014-08-19T16:49:49.520,ns_1@10.242.238.88:<0.29179.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29710.0> [rebalance:info,2014-08-19T16:49:49.521,ns_1@10.242.238.88:<0.29179.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:49:49.523,ns_1@10.242.238.88:<0.29179.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.523,ns_1@10.242.238.88:<0.29179.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:49:49.524,ns_1@10.242.238.88:<0.27767.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1002 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:49:49.528,ns_1@10.242.238.88:<0.27775.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:49:49.530,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.531,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{749, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.531,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:49:49.531,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.533,ns_1@10.242.238.88:<0.29172.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_756 [ns_server:info,2014-08-19T16:49:49.533,ns_1@10.242.238.88:<0.27775.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1002_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:49:49.535,ns_1@10.242.238.88:<0.29172.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[756]}, {checkpoints,[{756,1}]}, {name,<<"rebalance_756">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[756]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"756"}]} [rebalance:debug,2014-08-19T16:49:49.536,ns_1@10.242.238.88:<0.29172.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29720.0> [rebalance:info,2014-08-19T16:49:49.537,ns_1@10.242.238.88:<0.29172.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:49:49.538,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 749 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:49:49.539,ns_1@10.242.238.88:<0.29172.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:49:49.539,ns_1@10.242.238.88:<0.29172.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:49:49.539,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 749) [ns_server:debug,2014-08-19T16:49:49.540,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.540,ns_1@10.242.238.88:<0.26855.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 756 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:49:49.540,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 757 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.540,ns_1@10.242.238.88:<0.29723.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 757 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:49:49.542,ns_1@10.242.238.88:<0.26863.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:49:49.546,ns_1@10.242.238.88:<0.26863.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_756_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:49:49.551,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.552,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:49:49.552,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.552,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{757, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.552,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:49:49.555,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/921. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.555,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",921,active,0} [rebalance:info,2014-08-19T16:49:49.560,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 757 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.560,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 757) [ns_server:debug,2014-08-19T16:49:49.561,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.561,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1007 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.561,ns_1@10.242.238.88:<0.29736.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1007 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.582,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.583,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.583,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1007, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.583,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.585,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.593,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1007 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.593,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1007) [ns_server:debug,2014-08-19T16:49:49.595,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.595,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 754 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.595,ns_1@10.242.238.88:<0.29747.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 754 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.605,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.606,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.606,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.607,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.607,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{754, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:49:49.613,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 754 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.647,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 754) [ns_server:debug,2014-08-19T16:49:49.648,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.648,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 765 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.648,ns_1@10.242.238.88:<0.29772.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 765 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.658,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.658,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.658,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.659,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.659,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{765, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:49:49.669,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 765 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.670,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 765) [ns_server:debug,2014-08-19T16:49:49.671,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.671,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 747 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.671,ns_1@10.242.238.88:<0.29783.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 747 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 919. Nacking mccouch update. [views:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/919. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.680,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",919,active,0} [ns_server:debug,2014-08-19T16:49:49.682,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 640,512,146,938,874,810,508,444,380,316,1002,989,925,742,678,614,550,248,184, 120,976,912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494, 430,366,302,975,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949, 766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740, 676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522, 220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342,704,576,210, 1015] [ns_server:debug,2014-08-19T16:49:49.689,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.690,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.690,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{747, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.690,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.690,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.696,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 747 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.697,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 747) [ns_server:debug,2014-08-19T16:49:49.698,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.698,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 752 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.698,ns_1@10.242.238.88:<0.29794.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 752 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.713,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.714,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{752, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.715,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.715,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.717,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.725,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 752 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.726,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 752) [ns_server:debug,2014-08-19T16:49:49.727,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.727,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1018 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.727,ns_1@10.242.238.88:<0.29805.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1018 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:49:49.731,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/919. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.731,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",919,active,0} [ns_server:debug,2014-08-19T16:49:49.743,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.744,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.744,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1018, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.745,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.745,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:49:49.752,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1018 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.753,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1018) [ns_server:debug,2014-08-19T16:49:49.753,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.753,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1014 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.754,ns_1@10.242.238.88:<0.29815.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1014 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.763,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.764,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1014, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.765,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.765,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.765,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.773,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1014 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.774,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1014) [ns_server:debug,2014-08-19T16:49:49.775,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.775,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 761 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.775,ns_1@10.242.238.88:<0.29826.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 761 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.789,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.789,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
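
The state-change records above ("Doing bulk vbucket 1014 state change [...]", "Doing vbucket 761 state change: {...}") all carry 4-tuples of the same shape. Read against the surrounding records, the first element is the node being updated, the second is the vbucket state it is set to, and the last appears to name the node the new replica is fed from ('undefined' when no replication source is being set here); the third field is left uninterpreted. A small illustrative helper under that reading — the reading is an inference from these records, not taken from janitor_agent's code:

    def describe_state_change(vbucket, change):
        """Render one of the 4-tuples from a 'Doing vbucket N state change' record."""
        node, state, _third, replicate_from = change
        text = f"vbucket {vbucket}: set {node} to {state}"
        if replicate_from is not None:
            text += f", fed from {replicate_from}"
        return text

    # The 16:49:49.775 record for vbucket 761, transcribed by hand;
    # Erlang's 'undefined' is rendered here as None.
    print(describe_state_change(761,
          ("ns_1@10.242.238.91", "replica", None, "ns_1@10.242.238.90")))
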
[ns_server:debug,2014-08-19T16:49:49.790,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{761, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.790,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.790,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.796,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 761 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.797,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 761) [ns_server:debug,2014-08-19T16:49:49.798,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.798,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1021 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.798,ns_1@10.242.238.88:<0.29837.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1021 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.815,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.816,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.816,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.816,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.816,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1021, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:49:49.826,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1021 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.827,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1021) [ns_server:debug,2014-08-19T16:49:49.828,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.828,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1009 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.828,ns_1@10.242.238.88:<0.29862.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1009 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.839,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.840,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.840,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1009, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.840,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.841,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.847,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1009 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.848,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1009) [ns_server:debug,2014-08-19T16:49:49.849,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.849,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1003 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.849,ns_1@10.242.238.88:<0.29872.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1003 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.872,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.872,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.872,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
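
Each "config change: buckets" record in this stretch shows what appears to be the one map entry that changed: the vbucket id, its old chain, and its new chain, with 'undefined' standing in for a missing replica. The node named in the following "Will delete it on" line is, in every case here, the node present in the old chain but absent from the new one. A hedged sketch of that relationship (a reading of these records, not the rebalancer's actual logic):

    def nodes_to_delete(old_chain, new_chain):
        """Nodes that held the vbucket before the move but are not in the new chain."""
        old = {n for n in old_chain if n != "undefined"}   # drop the missing-replica placeholder
        new = {n for n in new_chain if n != "undefined"}
        return sorted(old - new)

    # The vbucket 761 move logged above: old chain -> new chain.
    print(nodes_to_delete(["ns_1@10.242.238.88", "undefined"],
                          ["ns_1@10.242.238.90", "ns_1@10.242.238.91"]))
    # prints ['ns_1@10.242.238.88'], matching "Will delete it on: ['ns_1@10.242.238.88']"
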
[ns_server:debug,2014-08-19T16:49:49.872,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1003, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.873,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.882,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1003 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.882,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1003) [ns_server:debug,2014-08-19T16:49:49.884,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.884,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1012 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.884,ns_1@10.242.238.88:<0.29884.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1012 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.889,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 917. Nacking mccouch update. [views:debug,2014-08-19T16:49:49.890,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/917. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.890,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",917,active,0} [ns_server:debug,2014-08-19T16:49:49.891,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 640,512,146,938,874,810,508,444,380,316,1002,989,925,742,678,614,550,248,184, 120,976,912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494, 430,366,302,975,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949, 766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740, 676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522, 220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342,704,576, 210,1015] [ns_server:debug,2014-08-19T16:49:49.895,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.895,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.896,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.896,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1012, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.896,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.901,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1012 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.902,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1012) [ns_server:debug,2014-08-19T16:49:49.903,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.903,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1020 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.903,ns_1@10.242.238.88:<0.29894.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1020 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.915,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.916,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.917,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1020, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:49:49.925,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1020 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.926,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1020) [ns_server:debug,2014-08-19T16:49:49.927,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.927,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1011 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.927,ns_1@10.242.238.88:<0.29905.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1011 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.938,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.939,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.939,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.939,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1011, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.939,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.949,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1011 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:49.949,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1011) [ns_server:debug,2014-08-19T16:49:49.950,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.950,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1002 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:49:49.950,ns_1@10.242.238.88:<0.29915.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1002 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:49:49.963,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.964,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.964,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:49:49.964,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1002, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.964,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:49.974,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1002 done. Will delete it on: ['ns_1@10.242.238.88'] [views:debug,2014-08-19T16:49:49.975,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/917. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:49.975,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",917,active,0} [ns_server:debug,2014-08-19T16:49:49.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1002) [ns_server:debug,2014-08-19T16:49:49.976,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:49:49.976,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 756 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:49:49.976,ns_1@10.242.238.88:<0.29927.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 756 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:49:49.991,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.992,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:49:49.992,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:49:49.992,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{756, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:49:49.993,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:49:50.000,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 756 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:49:50.000,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 756) [ns_server:debug,2014-08-19T16:49:50.002,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.88'}] [ns_server:debug,2014-08-19T16:49:50.002,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:49:50.003,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:49:50.003,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:49:50.006,ns_1@10.242.238.88:<0.29937.0>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:49:50.006,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:49:50.006,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.88'} [ns_server:debug,2014-08-19T16:49:50.006,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:49:50.011,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:50.012,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.29945.0>) [ns_server:debug,2014-08-19T16:49:50.012,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1001) [ns_server:debug,2014-08-19T16:49:50.012,ns_1@10.242.238.88:<0.29946.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:50.012,ns_1@10.242.238.88:<0.29945.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1001 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.012,ns_1@10.242.238.88:<0.29953.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1001 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.012,ns_1@10.242.238.88:<0.29954.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1001 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.016,ns_1@10.242.238.88:<0.29955.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1001 into 'ns_1@10.242.238.90' is <18125.20149.0> [ns_server:debug,2014-08-19T16:49:50.018,ns_1@10.242.238.88:<0.29955.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1001 into 'ns_1@10.242.238.91' is <18126.20711.0> [rebalance:debug,2014-08-19T16:49:50.018,ns_1@10.242.238.88:<0.29945.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1001 is <0.29955.0> [ns_server:debug,2014-08-19T16:49:50.060,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,51588}, tap_estimate, {replica_building,"default",1001,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20149.0>, <<"replication_building_1001_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.069,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 915. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.069,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/915. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.071,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 640,512,146,938,874,810,508,444,380,316,1002,989,925,742,678,614,550,248,184, 120,976,912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494, 430,366,302,975,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949, 766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740, 676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522, 220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,730,602,236,108,964,836,470,342,704, 576,210,1015] [ns_server:debug,2014-08-19T16:49:50.073,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",915,active,0} [ns_server:debug,2014-08-19T16:49:50.076,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,67178}, tap_estimate, {replica_building,"default",1001,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20711.0>, <<"replication_building_1001_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.076,ns_1@10.242.238.88:<0.29962.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20711.0>}, {'ns_1@10.242.238.90',<18125.20149.0>}]) [rebalance:info,2014-08-19T16:49:50.076,ns_1@10.242.238.88:<0.29945.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:50.077,ns_1@10.242.238.88:<0.29945.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1001 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.078,ns_1@10.242.238.88:<0.29945.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for 
checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.078,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:50.083,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.083,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.29974.0>) [ns_server:debug,2014-08-19T16:49:50.083,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 746) [ns_server:debug,2014-08-19T16:49:50.083,ns_1@10.242.238.88:<0.29975.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.083,ns_1@10.242.238.88:<0.29975.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:50.084,ns_1@10.242.238.88:<0.29974.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 746 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.084,ns_1@10.242.238.88:<0.29980.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 746 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.084,ns_1@10.242.238.88:<0.29981.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 746 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.087,ns_1@10.242.238.88:<0.29982.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 746 into 'ns_1@10.242.238.91' is <18126.20718.0> [ns_server:debug,2014-08-19T16:49:50.089,ns_1@10.242.238.88:<0.29982.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 746 into 'ns_1@10.242.238.90' is <18125.20168.0> [rebalance:debug,2014-08-19T16:49:50.090,ns_1@10.242.238.88:<0.29974.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 746 is <0.29982.0> [views:debug,2014-08-19T16:49:50.111,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/915. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.111,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",915,active,0} [ns_server:debug,2014-08-19T16:49:50.126,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,117154}, tap_estimate, {replica_building,"default",746,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20718.0>, <<"replication_building_746_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.141,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,132812}, tap_estimate, {replica_building,"default",746,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20168.0>, <<"replication_building_746_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.142,ns_1@10.242.238.88:<0.29983.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20168.0>}, {'ns_1@10.242.238.91',<18126.20718.0>}]) [rebalance:info,2014-08-19T16:49:50.142,ns_1@10.242.238.88:<0.29974.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:50.143,ns_1@10.242.238.88:<0.29974.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 746 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.143,ns_1@10.242.238.88:<0.29974.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.144,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.148,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.148,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30003.0>) [ns_server:debug,2014-08-19T16:49:50.149,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 490) [ns_server:debug,2014-08-19T16:49:50.149,ns_1@10.242.238.88:<0.30004.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.150,ns_1@10.242.238.88:<0.30004.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:50.150,ns_1@10.242.238.88:<0.30003.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 490 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.150,ns_1@10.242.238.88:<0.30014.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 490 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.150,ns_1@10.242.238.88:<0.30015.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 490 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.154,ns_1@10.242.238.88:<0.30017.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 490 into 'ns_1@10.242.238.91' is <18126.20737.0> [ns_server:debug,2014-08-19T16:49:50.157,ns_1@10.242.238.88:<0.30017.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 490 into 'ns_1@10.242.238.89' is <18124.25699.0> [rebalance:debug,2014-08-19T16:49:50.157,ns_1@10.242.238.88:<0.30003.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 490 is <0.30017.0> [ns_server:debug,2014-08-19T16:49:50.186,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 913. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.186,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/913. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.186,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",913,active,0} [ns_server:debug,2014-08-19T16:49:50.188,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 640,512,146,938,874,810,508,444,380,316,1002,989,925,742,678,614,550,248,184, 120,976,912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494, 430,366,302,975,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949, 766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740, 676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522, 220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 
374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 704,576,210,1015] [ns_server:debug,2014-08-19T16:49:50.198,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,189401}, tap_estimate, {replica_building,"default",490,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20737.0>, <<"replication_building_490_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.216,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,207817}, tap_estimate, {replica_building,"default",490,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25699.0>, <<"replication_building_490_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:50.217,ns_1@10.242.238.88:<0.30018.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25699.0>}, {'ns_1@10.242.238.91',<18126.20737.0>}]) [rebalance:info,2014-08-19T16:49:50.217,ns_1@10.242.238.88:<0.30003.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:50.219,ns_1@10.242.238.88:<0.30003.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 490 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.219,ns_1@10.242.238.88:<0.30003.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.220,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.224,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:50.224,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default", 1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30030.0>) [ns_server:debug,2014-08-19T16:49:50.224,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 1000) [ns_server:debug,2014-08-19T16:49:50.225,ns_1@10.242.238.88:<0.30031.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.225,ns_1@10.242.238.88:<0.30031.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:50.225,ns_1@10.242.238.88:<0.30030.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1000 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.225,ns_1@10.242.238.88:<0.30036.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1000 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.225,ns_1@10.242.238.88:<0.30037.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1000 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.229,ns_1@10.242.238.88:<0.30038.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1000 into 'ns_1@10.242.238.90' is <18125.20174.0> [ns_server:debug,2014-08-19T16:49:50.232,ns_1@10.242.238.88:<0.30038.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 1000 into 'ns_1@10.242.238.91' is <18126.20742.0> [rebalance:debug,2014-08-19T16:49:50.232,ns_1@10.242.238.88:<0.30030.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 1000 is <0.30038.0> [views:debug,2014-08-19T16:49:50.245,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/913. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.246,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",913,active,0} [ns_server:debug,2014-08-19T16:49:50.269,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,260698}, tap_estimate, {replica_building,"default",1000,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20174.0>, <<"replication_building_1000_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.284,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,275309}, tap_estimate, {replica_building,"default",1000,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20742.0>, <<"replication_building_1000_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.284,ns_1@10.242.238.88:<0.30039.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20742.0>}, {'ns_1@10.242.238.90',<18125.20174.0>}]) [rebalance:info,2014-08-19T16:49:50.284,ns_1@10.242.238.88:<0.30030.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:50.285,ns_1@10.242.238.88:<0.30030.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1000 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.286,ns_1@10.242.238.88:<0.30030.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.286,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:50.291,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:49:50.291,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30051.0>) [ns_server:debug,2014-08-19T16:49:50.291,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 745) [ns_server:debug,2014-08-19T16:49:50.291,ns_1@10.242.238.88:<0.30052.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.292,ns_1@10.242.238.88:<0.30052.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:50.292,ns_1@10.242.238.88:<0.30051.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 745 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.292,ns_1@10.242.238.88:<0.30057.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 745 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.292,ns_1@10.242.238.88:<0.30058.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 745 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.296,ns_1@10.242.238.88:<0.30059.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 745 into 'ns_1@10.242.238.91' is <18126.20748.0> [ns_server:debug,2014-08-19T16:49:50.299,ns_1@10.242.238.88:<0.30059.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 745 into 'ns_1@10.242.238.90' is <18125.20193.0> [rebalance:debug,2014-08-19T16:49:50.299,ns_1@10.242.238.88:<0.30051.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 745 is <0.30059.0> [ns_server:debug,2014-08-19T16:49:50.338,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,329123}, tap_estimate, {replica_building,"default",745,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20748.0>, <<"replication_building_745_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.353,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,344147}, tap_estimate, {replica_building,"default",745,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20193.0>, <<"replication_building_745_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.353,ns_1@10.242.238.88:<0.30060.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20193.0>}, {'ns_1@10.242.238.91',<18126.20748.0>}]) [rebalance:info,2014-08-19T16:49:50.353,ns_1@10.242.238.88:<0.30051.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:50.354,ns_1@10.242.238.88:<0.30051.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 745 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.354,ns_1@10.242.238.88:<0.30051.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
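
Every move in this log is bracketed by a "Noted vbucket move start (vbucket N)" and a "Noted vbucket move end (vbucket N)" record from ns_rebalance_observer, so per-vbucket move times fall straight out of the timestamps. A rough sketch, again run against a hypothetical saved copy of the log:

    import re
    from datetime import datetime

    EVENT_RE = re.compile(
        r"\[ns_server:debug,([^,]+),[^\]]*\]"          # timestamp, then the rest of the header
        r"Noted vbucket move (start|end) \(vbucket (\d+)\)")

    with open("ns_server.debug.log") as f:             # hypothetical path to this log
        text = f.read()

    starts = {}
    for ts_str, kind, vb in EVENT_RE.findall(text):
        ts = datetime.strptime(ts_str, "%Y-%m-%dT%H:%M:%S.%f")
        if kind == "start":
            starts[vb] = ts
        elif vb in starts:
            duration = (ts - starts.pop(vb)).total_seconds()
            print(f"vbucket {vb}: moved in {duration:.3f}s")
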
[ns_server:debug,2014-08-19T16:49:50.355,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.360,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.360,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30086.0>) [ns_server:debug,2014-08-19T16:49:50.360,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 489) [ns_server:debug,2014-08-19T16:49:50.360,ns_1@10.242.238.88:<0.30087.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.360,ns_1@10.242.238.88:<0.30087.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:50.361,ns_1@10.242.238.88:<0.30086.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 489 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.361,ns_1@10.242.238.88:<0.30092.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 489 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.361,ns_1@10.242.238.88:<0.30093.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 489 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.365,ns_1@10.242.238.88:<0.30094.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 489 into 'ns_1@10.242.238.91' is <18126.20767.0> [ns_server:debug,2014-08-19T16:49:50.368,ns_1@10.242.238.88:<0.30094.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 489 into 'ns_1@10.242.238.89' is <18124.25719.0> [rebalance:debug,2014-08-19T16:49:50.368,ns_1@10.242.238.88:<0.30086.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 489 is <0.30094.0> [ns_server:debug,2014-08-19T16:49:50.371,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 911. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.371,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/911. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.371,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",911,active,0} [ns_server:debug,2014-08-19T16:49:50.372,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 640,512,146,938,874,810,508,444,380,316,1002,989,925,742,678,614,550,248,184, 120,976,912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494, 430,366,302,975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276, 949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923, 740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586, 522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130, 986,922,858,794,492,428,364,300,973,726,662,598,534,232,168,960,896,832,768, 466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868, 502,374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942, 814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578, 212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,704,576,210,1015] [ns_server:debug,2014-08-19T16:49:50.404,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,395958}, tap_estimate, {replica_building,"default",489,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20767.0>, <<"replication_building_489_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.420,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,411676}, tap_estimate, {replica_building,"default",489,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25719.0>, <<"replication_building_489_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:50.421,ns_1@10.242.238.88:<0.30095.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25719.0>}, {'ns_1@10.242.238.91',<18126.20767.0>}]) [rebalance:info,2014-08-19T16:49:50.421,ns_1@10.242.238.88:<0.30086.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 
[rebalance:info,2014-08-19T16:49:50.422,ns_1@10.242.238.88:<0.30086.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 489 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.422,ns_1@10.242.238.88:<0.30086.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.422,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.427,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:50.427,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30107.0>) [ns_server:debug,2014-08-19T16:49:50.427,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 999) [ns_server:debug,2014-08-19T16:49:50.428,ns_1@10.242.238.88:<0.30108.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.428,ns_1@10.242.238.88:<0.30108.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:50.428,ns_1@10.242.238.88:<0.30107.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 999 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.428,ns_1@10.242.238.88:<0.30113.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 999 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.428,ns_1@10.242.238.88:<0.30114.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 999 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.432,ns_1@10.242.238.88:<0.30115.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 999 into 'ns_1@10.242.238.90' is <18125.20199.0> [ns_server:debug,2014-08-19T16:49:50.434,ns_1@10.242.238.88:<0.30115.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 999 into 'ns_1@10.242.238.91' is <18126.20772.0> [rebalance:debug,2014-08-19T16:49:50.434,ns_1@10.242.238.88:<0.30107.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 999 is <0.30115.0> [views:debug,2014-08-19T16:49:50.455,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/911. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.455,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",911,active,0} [ns_server:debug,2014-08-19T16:49:50.471,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,462838}, tap_estimate, {replica_building,"default",999,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20199.0>, <<"replication_building_999_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.490,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,481199}, tap_estimate, {replica_building,"default",999,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20772.0>, <<"replication_building_999_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.490,ns_1@10.242.238.88:<0.30117.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20772.0>}, {'ns_1@10.242.238.90',<18125.20199.0>}]) [rebalance:info,2014-08-19T16:49:50.490,ns_1@10.242.238.88:<0.30107.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:50.491,ns_1@10.242.238.88:<0.30107.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 999 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.492,ns_1@10.242.238.88:<0.30107.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.492,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:50.496,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.497,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30129.0>) [ns_server:debug,2014-08-19T16:49:50.497,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 744) [ns_server:debug,2014-08-19T16:49:50.497,ns_1@10.242.238.88:<0.30130.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.497,ns_1@10.242.238.88:<0.30130.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:50.497,ns_1@10.242.238.88:<0.30129.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 744 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.497,ns_1@10.242.238.88:<0.30135.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 744 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.498,ns_1@10.242.238.88:<0.30136.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 744 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.501,ns_1@10.242.238.88:<0.30137.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 744 into 'ns_1@10.242.238.91' is <18126.20778.0> [ns_server:debug,2014-08-19T16:49:50.504,ns_1@10.242.238.88:<0.30137.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 744 into 'ns_1@10.242.238.90' is <18125.20218.0> [rebalance:debug,2014-08-19T16:49:50.504,ns_1@10.242.238.88:<0.30129.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 744 is <0.30137.0> [ns_server:debug,2014-08-19T16:49:50.540,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,531102}, tap_estimate, {replica_building,"default",744,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20778.0>, <<"replication_building_744_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.555,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,546463}, tap_estimate, {replica_building,"default",744,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20218.0>, <<"replication_building_744_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.555,ns_1@10.242.238.88:<0.30138.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20218.0>}, {'ns_1@10.242.238.91',<18126.20778.0>}]) [rebalance:info,2014-08-19T16:49:50.556,ns_1@10.242.238.88:<0.30129.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:50.556,ns_1@10.242.238.88:<0.30129.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 744 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.557,ns_1@10.242.238.88:<0.30129.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.557,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.562,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.562,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30164.0>) 
[ns_server:debug,2014-08-19T16:49:50.562,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 488) [ns_server:debug,2014-08-19T16:49:50.563,ns_1@10.242.238.88:<0.30165.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.563,ns_1@10.242.238.88:<0.30165.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:50.563,ns_1@10.242.238.88:<0.30164.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 488 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.563,ns_1@10.242.238.88:<0.30170.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 488 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.563,ns_1@10.242.238.88:<0.30171.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 488 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.567,ns_1@10.242.238.88:<0.30172.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 488 into 'ns_1@10.242.238.91' is <18126.20803.0> [ns_server:debug,2014-08-19T16:49:50.570,ns_1@10.242.238.88:<0.30172.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 488 into 'ns_1@10.242.238.89' is <18124.25739.0> [rebalance:debug,2014-08-19T16:49:50.570,ns_1@10.242.238.88:<0.30164.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 488 is <0.30172.0> [ns_server:debug,2014-08-19T16:49:50.607,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,597976}, tap_estimate, {replica_building,"default",488,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20803.0>, <<"replication_building_488_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.621,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,612955}, tap_estimate, {replica_building,"default",488,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25739.0>, <<"replication_building_488_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:50.622,ns_1@10.242.238.88:<0.30173.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25739.0>}, {'ns_1@10.242.238.91',<18126.20803.0>}]) [rebalance:info,2014-08-19T16:49:50.622,ns_1@10.242.238.88:<0.30164.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:50.622,ns_1@10.242.238.88:<0.30164.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 488 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.623,ns_1@10.242.238.88:<0.30164.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.623,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:50.628,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:50.628,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30185.0>) [ns_server:debug,2014-08-19T16:49:50.628,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 998) [ns_server:debug,2014-08-19T16:49:50.628,ns_1@10.242.238.88:<0.30186.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.629,ns_1@10.242.238.88:<0.30186.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:50.629,ns_1@10.242.238.88:<0.30185.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 998 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.629,ns_1@10.242.238.88:<0.30191.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 998 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.629,ns_1@10.242.238.88:<0.30192.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 998 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.629,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 909. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.629,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/909. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",909,active,0} [ns_server:debug,2014-08-19T16:49:50.631,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,724,596,230,958,830,464,336,698,570,204,1009, 932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386,258, 1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696,568, 202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150,878, 384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460,332, 694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642,514, 148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952,824, 458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278,951, 640,512,146,874,508,380,1002,989,925,742,678,614,550,248,184,120,976,912,848, 784,482,418,354,290,963,716,652,588,524,222,158,950,886,822,456,392,328,264, 1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975, 911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766,702,638, 574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612,548, 246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522,220,156,948, 884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794, 492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921, 738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712,648,584, 520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792, 426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919, 736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448,320, 993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,704,576, 210,1015,938,810,444,316] [ns_server:debug,2014-08-19T16:49:50.633,ns_1@10.242.238.88:<0.30193.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 998 into 'ns_1@10.242.238.90' is <18125.20238.0> [ns_server:debug,2014-08-19T16:49:50.634,ns_1@10.242.238.88:<0.30193.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 998 into 'ns_1@10.242.238.91' is <18126.20809.0> [rebalance:debug,2014-08-19T16:49:50.635,ns_1@10.242.238.88:<0.30185.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 998 is <0.30193.0> [ns_server:debug,2014-08-19T16:49:50.670,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,661341}, tap_estimate, {replica_building,"default",998,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20238.0>, <<"replication_building_998_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.686,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: 
{{1408,452590,677324}, tap_estimate, {replica_building,"default",998,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20809.0>, <<"replication_building_998_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.686,ns_1@10.242.238.88:<0.30194.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20809.0>}, {'ns_1@10.242.238.90',<18125.20238.0>}]) [rebalance:info,2014-08-19T16:49:50.686,ns_1@10.242.238.88:<0.30185.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:50.687,ns_1@10.242.238.88:<0.30185.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 998 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.687,ns_1@10.242.238.88:<0.30185.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.688,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:50.692,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.693,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30206.0>) [ns_server:debug,2014-08-19T16:49:50.693,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 743) [ns_server:debug,2014-08-19T16:49:50.693,ns_1@10.242.238.88:<0.30207.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.693,ns_1@10.242.238.88:<0.30207.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:50.693,ns_1@10.242.238.88:<0.30206.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 743 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.693,ns_1@10.242.238.88:<0.30212.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 743 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.694,ns_1@10.242.238.88:<0.30213.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 743 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.697,ns_1@10.242.238.88:<0.30214.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 743 into 'ns_1@10.242.238.91' is <18126.20815.0> [ns_server:debug,2014-08-19T16:49:50.700,ns_1@10.242.238.88:<0.30214.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 743 into 'ns_1@10.242.238.90' is <18125.20243.0> [rebalance:debug,2014-08-19T16:49:50.700,ns_1@10.242.238.88:<0.30206.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 743 is <0.30214.0> [views:debug,2014-08-19T16:49:50.714,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/909. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.714,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",909,active,0} [ns_server:debug,2014-08-19T16:49:50.737,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,728469}, tap_estimate, {replica_building,"default",743,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20815.0>, <<"replication_building_743_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.753,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,744203}, tap_estimate, {replica_building,"default",743,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20243.0>, <<"replication_building_743_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.753,ns_1@10.242.238.88:<0.30215.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20243.0>}, {'ns_1@10.242.238.91',<18126.20815.0>}]) [rebalance:info,2014-08-19T16:49:50.753,ns_1@10.242.238.88:<0.30206.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:50.754,ns_1@10.242.238.88:<0.30206.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 743 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.754,ns_1@10.242.238.88:<0.30206.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.755,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.759,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:49:50.760,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30227.0>) [ns_server:debug,2014-08-19T16:49:50.760,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 487) [ns_server:debug,2014-08-19T16:49:50.760,ns_1@10.242.238.88:<0.30228.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.760,ns_1@10.242.238.88:<0.30228.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:50.760,ns_1@10.242.238.88:<0.30227.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 487 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.761,ns_1@10.242.238.88:<0.30233.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 487 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.761,ns_1@10.242.238.88:<0.30234.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 487 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.764,ns_1@10.242.238.88:<0.30235.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 487 into 'ns_1@10.242.238.91' is <18126.20834.0> [ns_server:debug,2014-08-19T16:49:50.767,ns_1@10.242.238.88:<0.30235.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 487 into 'ns_1@10.242.238.89' is <18124.25759.0> [rebalance:debug,2014-08-19T16:49:50.767,ns_1@10.242.238.88:<0.30227.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 487 is <0.30235.0> [ns_server:debug,2014-08-19T16:49:50.803,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,794598}, tap_estimate, {replica_building,"default",487,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20834.0>, <<"replication_building_487_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.820,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,811878}, tap_estimate, {replica_building,"default",487,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25759.0>, <<"replication_building_487_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:50.821,ns_1@10.242.238.88:<0.30236.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25759.0>}, {'ns_1@10.242.238.91',<18126.20834.0>}]) [rebalance:info,2014-08-19T16:49:50.821,ns_1@10.242.238.88:<0.30227.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:50.821,ns_1@10.242.238.88:<0.30227.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 487 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.822,ns_1@10.242.238.88:<0.30227.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:49:50.822,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.827,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:50.827,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30262.0>) [ns_server:debug,2014-08-19T16:49:50.827,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 997) [ns_server:debug,2014-08-19T16:49:50.827,ns_1@10.242.238.88:<0.30263.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.828,ns_1@10.242.238.88:<0.30263.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:50.828,ns_1@10.242.238.88:<0.30262.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 997 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.828,ns_1@10.242.238.88:<0.30268.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 997 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.828,ns_1@10.242.238.88:<0.30269.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 997 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.832,ns_1@10.242.238.88:<0.30270.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 997 into 'ns_1@10.242.238.90' is <18125.20263.0> [ns_server:debug,2014-08-19T16:49:50.834,ns_1@10.242.238.88:<0.30270.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 997 into 'ns_1@10.242.238.91' is <18126.20839.0> [rebalance:debug,2014-08-19T16:49:50.834,ns_1@10.242.238.88:<0.30262.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 997 is <0.30270.0> [ns_server:debug,2014-08-19T16:49:50.838,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 907. Nacking mccouch update. [views:debug,2014-08-19T16:49:50.839,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/907. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.839,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",907,active,0} [ns_server:debug,2014-08-19T16:49:50.841,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,722,594,228,956,828,462,334,696, 568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516,150, 878,384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826,460, 332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953,642, 514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224,952, 824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406,278, 951,640,512,146,874,508,380,1002,989,925,742,678,614,550,248,184,120,976,912, 848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766,702, 638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612, 548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522,220,156, 948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858, 794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402, 338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985, 921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374, 919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994, 866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,704, 576,210,1015,938,810,444,316] [ns_server:debug,2014-08-19T16:49:50.875,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,866226}, tap_estimate, {replica_building,"default",997,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20263.0>, <<"replication_building_997_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.890,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,881844}, tap_estimate, {replica_building,"default",997,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20839.0>, <<"replication_building_997_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.891,ns_1@10.242.238.88:<0.30271.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20839.0>}, {'ns_1@10.242.238.90',<18125.20263.0>}]) [rebalance:info,2014-08-19T16:49:50.891,ns_1@10.242.238.88:<0.30262.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 
[rebalance:info,2014-08-19T16:49:50.892,ns_1@10.242.238.88:<0.30262.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 997 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.892,ns_1@10.242.238.88:<0.30262.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.893,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:50.897,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.897,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30283.0>) [ns_server:debug,2014-08-19T16:49:50.897,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 742) [ns_server:debug,2014-08-19T16:49:50.898,ns_1@10.242.238.88:<0.30284.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.898,ns_1@10.242.238.88:<0.30284.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:50.898,ns_1@10.242.238.88:<0.30283.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 742 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.898,ns_1@10.242.238.88:<0.30289.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 742 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.898,ns_1@10.242.238.88:<0.30290.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 742 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.902,ns_1@10.242.238.88:<0.30291.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 742 into 'ns_1@10.242.238.91' is <18126.20860.0> [ns_server:debug,2014-08-19T16:49:50.904,ns_1@10.242.238.88:<0.30291.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 742 into 'ns_1@10.242.238.90' is <18125.20282.0> [rebalance:debug,2014-08-19T16:49:50.904,ns_1@10.242.238.88:<0.30283.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 742 is <0.30291.0> [views:debug,2014-08-19T16:49:50.923,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/907. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:50.923,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",907,active,0} [ns_server:debug,2014-08-19T16:49:50.941,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,932662}, tap_estimate, {replica_building,"default",742,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20860.0>, <<"replication_building_742_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:50.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,950536}, tap_estimate, {replica_building,"default",742,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20282.0>, <<"replication_building_742_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:50.960,ns_1@10.242.238.88:<0.30292.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20282.0>}, {'ns_1@10.242.238.91',<18126.20860.0>}]) [rebalance:info,2014-08-19T16:49:50.960,ns_1@10.242.238.88:<0.30283.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:50.960,ns_1@10.242.238.88:<0.30283.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 742 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:50.960,ns_1@10.242.238.88:<0.30283.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:50.961,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:50.965,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:50.966,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30304.0>) [ns_server:debug,2014-08-19T16:49:50.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 486) [ns_server:debug,2014-08-19T16:49:50.966,ns_1@10.242.238.88:<0.30305.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:50.966,ns_1@10.242.238.88:<0.30305.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:50.966,ns_1@10.242.238.88:<0.30304.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 486 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:50.967,ns_1@10.242.238.88:<0.30310.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 486 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:50.967,ns_1@10.242.238.88:<0.30311.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 486 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:50.970,ns_1@10.242.238.88:<0.30312.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 486 into 'ns_1@10.242.238.91' is <18126.20879.0> [ns_server:debug,2014-08-19T16:49:50.973,ns_1@10.242.238.88:<0.30312.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 486 into 'ns_1@10.242.238.89' is <18124.25779.0> [rebalance:debug,2014-08-19T16:49:50.973,ns_1@10.242.238.88:<0.30304.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 486 is <0.30312.0> [ns_server:debug,2014-08-19T16:49:51.008,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452590,999497}, tap_estimate, {replica_building,"default",486,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20879.0>, <<"replication_building_486_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.025,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,16763}, tap_estimate, {replica_building,"default",486,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25779.0>, <<"replication_building_486_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:51.026,ns_1@10.242.238.88:<0.30313.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25779.0>}, {'ns_1@10.242.238.91',<18126.20879.0>}]) [rebalance:info,2014-08-19T16:49:51.026,ns_1@10.242.238.88:<0.30304.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:51.027,ns_1@10.242.238.88:<0.30304.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 486 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.027,ns_1@10.242.238.88:<0.30304.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.028,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.032,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:51.032,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30339.0>) 
[ns_server:debug,2014-08-19T16:49:51.033,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 996) [ns_server:debug,2014-08-19T16:49:51.033,ns_1@10.242.238.88:<0.30340.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.033,ns_1@10.242.238.88:<0.30340.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:51.033,ns_1@10.242.238.88:<0.30339.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 996 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.033,ns_1@10.242.238.88:<0.30345.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 996 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.033,ns_1@10.242.238.88:<0.30346.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 996 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.037,ns_1@10.242.238.88:<0.30347.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 996 into 'ns_1@10.242.238.90' is <18125.20288.0> [ns_server:debug,2014-08-19T16:49:51.040,ns_1@10.242.238.88:<0.30347.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 996 into 'ns_1@10.242.238.91' is <18126.20884.0> [rebalance:debug,2014-08-19T16:49:51.040,ns_1@10.242.238.88:<0.30339.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 996 is <0.30347.0> [ns_server:debug,2014-08-19T16:49:51.042,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 905. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.042,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/905. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.042,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",905,active,0} [ns_server:debug,2014-08-19T16:49:51.044,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,720,592,226,954,826, 460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280,953, 642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590,224, 952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772,406, 278,951,640,512,146,874,508,380,1002,989,925,742,678,614,550,248,184,120,976, 912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822,456,392, 328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366, 302,975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766, 702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676, 612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522,220, 156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922, 858,794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 704,576,210,1015,938,810,444,316] [ns_server:debug,2014-08-19T16:49:51.078,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,69795}, tap_estimate, {replica_building,"default",996,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20288.0>, <<"replication_building_996_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.096,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,87583}, tap_estimate, {replica_building,"default",996,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20884.0>, <<"replication_building_996_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.097,ns_1@10.242.238.88:<0.30348.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20884.0>}, {'ns_1@10.242.238.90',<18125.20288.0>}]) [rebalance:info,2014-08-19T16:49:51.097,ns_1@10.242.238.88:<0.30339.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 
[rebalance:info,2014-08-19T16:49:51.098,ns_1@10.242.238.88:<0.30339.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 996 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.098,ns_1@10.242.238.88:<0.30339.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.099,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [views:debug,2014-08-19T16:49:51.101,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/905. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.101,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",905,active,0} [ns_server:debug,2014-08-19T16:49:51.103,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.104,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30360.0>) [ns_server:debug,2014-08-19T16:49:51.104,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 741) [ns_server:debug,2014-08-19T16:49:51.104,ns_1@10.242.238.88:<0.30361.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.104,ns_1@10.242.238.88:<0.30361.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:51.104,ns_1@10.242.238.88:<0.30360.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 741 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.104,ns_1@10.242.238.88:<0.30366.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 741 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.104,ns_1@10.242.238.88:<0.30367.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 741 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.108,ns_1@10.242.238.88:<0.30368.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 741 into 'ns_1@10.242.238.91' is <18126.20904.0> [ns_server:debug,2014-08-19T16:49:51.111,ns_1@10.242.238.88:<0.30368.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 741 into 'ns_1@10.242.238.90' is <18125.20307.0> [rebalance:debug,2014-08-19T16:49:51.111,ns_1@10.242.238.88:<0.30360.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 741 is <0.30368.0> [ns_server:debug,2014-08-19T16:49:51.145,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,136435}, tap_estimate, {replica_building,"default",741,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20904.0>, <<"replication_building_741_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.163,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,154471}, tap_estimate, {replica_building,"default",741,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20307.0>, <<"replication_building_741_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.163,ns_1@10.242.238.88:<0.30369.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20307.0>}, {'ns_1@10.242.238.91',<18126.20904.0>}]) [rebalance:info,2014-08-19T16:49:51.164,ns_1@10.242.238.88:<0.30360.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:51.164,ns_1@10.242.238.88:<0.30360.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 741 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.164,ns_1@10.242.238.88:<0.30360.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.165,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.170,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.170,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30400.0>) 
[ns_server:debug,2014-08-19T16:49:51.170,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 485) [ns_server:debug,2014-08-19T16:49:51.170,ns_1@10.242.238.88:<0.30401.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.170,ns_1@10.242.238.88:<0.30401.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:51.171,ns_1@10.242.238.88:<0.30400.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 485 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.171,ns_1@10.242.238.88:<0.30406.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 485 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.171,ns_1@10.242.238.88:<0.30407.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 485 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.174,ns_1@10.242.238.88:<0.30408.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 485 into 'ns_1@10.242.238.91' is <18126.20909.0> [rebalance:info,2014-08-19T16:49:51.177,ns_1@10.242.238.88:<0.30164.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 488 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:51.177,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 488 state to active [ns_server:debug,2014-08-19T16:49:51.177,ns_1@10.242.238.88:<0.30408.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 485 into 'ns_1@10.242.238.89' is <18124.25799.0> [rebalance:debug,2014-08-19T16:49:51.177,ns_1@10.242.238.88:<0.30400.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 485 is <0.30408.0> [rebalance:info,2014-08-19T16:49:51.178,ns_1@10.242.238.88:<0.30164.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 488 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.179,ns_1@10.242.238.88:<0.30164.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.201,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 903. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.201,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/903. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.201,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",903,active,0} [ns_server:debug,2014-08-19T16:49:51.202,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,718,590, 224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900,772, 406,278,951,640,512,146,874,508,380,1002,989,925,742,678,614,550,248,184,120, 976,912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822,456, 392,328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430, 366,302,975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949, 766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740, 676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522, 220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768, 466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868, 502,374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942, 814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578, 212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,704,576,210,1015,938,810,444,316] [ns_server:debug,2014-08-19T16:49:51.213,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,204579}, tap_estimate, {replica_building,"default",485,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20909.0>, <<"replication_building_485_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.228,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,219273}, tap_estimate, {replica_building,"default",485,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25799.0>, <<"replication_building_485_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:51.228,ns_1@10.242.238.88:<0.30409.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25799.0>}, {'ns_1@10.242.238.91',<18126.20909.0>}]) [rebalance:info,2014-08-19T16:49:51.229,ns_1@10.242.238.88:<0.30400.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 
[rebalance:info,2014-08-19T16:49:51.229,ns_1@10.242.238.88:<0.30400.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 485 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.230,ns_1@10.242.238.88:<0.30400.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.230,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.234,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:51.235,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30425.0>) [ns_server:debug,2014-08-19T16:49:51.235,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 995) [ns_server:debug,2014-08-19T16:49:51.235,ns_1@10.242.238.88:<0.30426.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.235,ns_1@10.242.238.88:<0.30426.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:51.235,ns_1@10.242.238.88:<0.30425.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 995 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.236,ns_1@10.242.238.88:<0.30431.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 995 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.236,ns_1@10.242.238.88:<0.30432.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 995 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.240,ns_1@10.242.238.88:<0.30433.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 995 into 'ns_1@10.242.238.90' is <18125.20327.0> [ns_server:debug,2014-08-19T16:49:51.242,ns_1@10.242.238.88:<0.30433.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 995 into 'ns_1@10.242.238.91' is <18126.20931.0> [rebalance:debug,2014-08-19T16:49:51.242,ns_1@10.242.238.88:<0.30425.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 995 is <0.30433.0> [ns_server:debug,2014-08-19T16:49:51.278,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,269882}, tap_estimate, {replica_building,"default",995,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20327.0>, <<"replication_building_995_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:49:51.285,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/903. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.285,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",903,active,0} [ns_server:debug,2014-08-19T16:49:51.297,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,288293}, tap_estimate, {replica_building,"default",995,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20931.0>, <<"replication_building_995_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.297,ns_1@10.242.238.88:<0.30434.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20931.0>}, {'ns_1@10.242.238.90',<18125.20327.0>}]) [rebalance:info,2014-08-19T16:49:51.297,ns_1@10.242.238.88:<0.30425.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:51.298,ns_1@10.242.238.88:<0.30425.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 995 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.299,ns_1@10.242.238.88:<0.30425.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.299,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:51.304,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.304,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30446.0>) [ns_server:debug,2014-08-19T16:49:51.304,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 740) [ns_server:debug,2014-08-19T16:49:51.304,ns_1@10.242.238.88:<0.30447.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.304,ns_1@10.242.238.88:<0.30447.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:51.304,ns_1@10.242.238.88:<0.30446.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 740 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.305,ns_1@10.242.238.88:<0.30452.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 740 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.305,ns_1@10.242.238.88:<0.30453.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 740 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.308,ns_1@10.242.238.88:<0.30454.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 740 into 'ns_1@10.242.238.91' is <18126.20937.0> [ns_server:debug,2014-08-19T16:49:51.310,ns_1@10.242.238.88:<0.30454.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 740 into 'ns_1@10.242.238.90' is <18125.20332.0> [rebalance:debug,2014-08-19T16:49:51.310,ns_1@10.242.238.88:<0.30446.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 740 is <0.30454.0> [ns_server:debug,2014-08-19T16:49:51.347,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,338683}, tap_estimate, {replica_building,"default",740,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20937.0>, <<"replication_building_740_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.363,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,354219}, tap_estimate, {replica_building,"default",740,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20332.0>, <<"replication_building_740_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.363,ns_1@10.242.238.88:<0.30455.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20332.0>}, {'ns_1@10.242.238.91',<18126.20937.0>}]) [rebalance:info,2014-08-19T16:49:51.363,ns_1@10.242.238.88:<0.30446.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:51.364,ns_1@10.242.238.88:<0.30446.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 740 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.364,ns_1@10.242.238.88:<0.30446.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.365,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.369,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.369,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30481.0>) 
[ns_server:debug,2014-08-19T16:49:51.370,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 484) [ns_server:debug,2014-08-19T16:49:51.370,ns_1@10.242.238.88:<0.30482.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.370,ns_1@10.242.238.88:<0.30482.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:51.370,ns_1@10.242.238.88:<0.30481.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 484 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.371,ns_1@10.242.238.88:<0.30487.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 484 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.371,ns_1@10.242.238.88:<0.30488.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 484 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.376,ns_1@10.242.238.88:<0.30497.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 484 into 'ns_1@10.242.238.91' is <18126.20942.0> [ns_server:debug,2014-08-19T16:49:51.378,ns_1@10.242.238.88:<0.30497.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 484 into 'ns_1@10.242.238.89' is <18124.25822.0> [rebalance:debug,2014-08-19T16:49:51.378,ns_1@10.242.238.88:<0.30481.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 484 is <0.30497.0> [ns_server:debug,2014-08-19T16:49:51.414,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,405166}, tap_estimate, {replica_building,"default",484,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20942.0>, <<"replication_building_484_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.430,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,421939}, tap_estimate, {replica_building,"default",484,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25822.0>, <<"replication_building_484_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:51.431,ns_1@10.242.238.88:<0.30498.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25822.0>}, {'ns_1@10.242.238.91',<18126.20942.0>}]) [rebalance:info,2014-08-19T16:49:51.431,ns_1@10.242.238.88:<0.30481.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:51.432,ns_1@10.242.238.88:<0.30481.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 484 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.432,ns_1@10.242.238.88:<0.30481.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.433,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} 
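
The entries above trace one complete pass of the single-vbucket mover for vbucket 484: a bulk state change makes both future copies replicas (the future master passive), one ebucketmigrator replica builder is spawned per destination node, backfill is confirmed on both ("Had backfill rvs: [true,true]"), indexing is initiated on the future master, and the mover then asks the old master for a replication persistence checkpoint id and waits for that checkpoint on the replicas. The Erlang sketch below only outlines that sequence under simplified, assumed names; it is not the actual ns_single_vbucket_mover / janitor_agent code, and the helper arities here do not match the real modules.

%% Illustrative sketch of the move sequence visible in the log above.
%% All function names and signatures below are simplified assumptions.
-module(vbucket_move_sketch).
-export([move/4, demo/0]).

%% One pass of the move, in the order the entries for vbucket 484 show.
move(Bucket, VBucket, [OldMaster | _], [FutureMaster | _] = NewChain) ->
    %% "Doing bulk vbucket N state change": future copies become replicas.
    ok = bulk_set_vbucket_state(Bucket, VBucket, NewChain),
    %% "Replica building ebucketmigrator for vbucket N into ...": one per node.
    Builders = [spawn_replica_builder(Bucket, VBucket, OldMaster, Dst)
                || Dst <- NewChain],
    %% "Had backfill rvs: [true,true]": every builder reports backfill started.
    true = lists:all(fun backfill_started/1, Builders),
    %% "Doing initiate_indexing call for ...": indexing on the future master.
    ok = initiate_indexing(Bucket, FutureMaster),
    %% "Doing get_replication_persistence_checkpoint_id call ..." followed by
    %% "Will wait for checkpoint 1 on replicas".
    CkptId = get_persistence_checkpoint_id(Bucket, VBucket, OldMaster),
    io:format("Will wait for checkpoint ~p on replicas~n", [CkptId]),
    {backfill_done, VBucket}.

demo() ->
    move("default", 484, ['ns_1@10.242.238.88', undefined],
         ['ns_1@10.242.238.89', 'ns_1@10.242.238.91']).

%% --- stand-ins for the janitor_agent / ns_replicas_builder_utils calls ---
bulk_set_vbucket_state(Bucket, VBucket, Chain) ->
    io:format("Doing bulk vbucket ~p state change (~s): ~p~n",
              [VBucket, Bucket, Chain]),
    ok.

spawn_replica_builder(Bucket, VBucket, Src, Dst) ->
    io:format("Replica building for ~s/~p from ~p into ~p~n",
              [Bucket, VBucket, Src, Dst]),
    {Dst, make_ref()}.

backfill_started({_Dst, _Ref}) -> true.

initiate_indexing(Bucket, Node) ->
    io:format("~s: Doing initiate_indexing call for ~p~n", [Bucket, Node]),
    ok.

get_persistence_checkpoint_id(Bucket, VBucket, Node) ->
    io:format("~s: Doing get_replication_persistence_checkpoint_id call for vbucket ~p on ~p~n",
              [Bucket, VBucket, Node]),
    1.

Compiling this module and calling vbucket_move_sketch:demo() prints a trace in the same order as the log entries for vbucket 484 above; it is meant only as a reading aid for the per-vbucket pattern that repeats throughout this section.
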
[ns_server:debug,2014-08-19T16:49:51.435,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 901. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.435,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/901. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.435,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",901,active,0} [ns_server:debug,2014-08-19T16:49:51.437,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,989,925,742,678,614,550,248,184, 120,976,912,848,784,482,418,354,290,963,716,652,588,524,222,158,950,886,822, 456,392,328,264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494, 430,366,302,975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276, 949,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923, 740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586, 522,220,156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130, 986,922,858,794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686, 558,192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996, 868,502,374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450, 322,995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760, 632,138,994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019, 942,814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268, 1018,941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706, 578,212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,704,576,210,1015,938,810,444,316] [ns_server:debug,2014-08-19T16:49:51.437,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:51.437,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30512.0>) [ns_server:debug,2014-08-19T16:49:51.437,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 994) 
[ns_server:debug,2014-08-19T16:49:51.438,ns_1@10.242.238.88:<0.30513.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.438,ns_1@10.242.238.88:<0.30513.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:51.438,ns_1@10.242.238.88:<0.30512.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 994 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.438,ns_1@10.242.238.88:<0.30518.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 994 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.438,ns_1@10.242.238.88:<0.30519.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 994 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.442,ns_1@10.242.238.88:<0.30520.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 994 into 'ns_1@10.242.238.90' is <18125.20338.0> [ns_server:debug,2014-08-19T16:49:51.445,ns_1@10.242.238.88:<0.30520.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 994 into 'ns_1@10.242.238.91' is <18126.20961.0> [rebalance:debug,2014-08-19T16:49:51.445,ns_1@10.242.238.88:<0.30512.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 994 is <0.30520.0> [ns_server:debug,2014-08-19T16:49:51.480,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,471631}, tap_estimate, {replica_building,"default",994,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20338.0>, <<"replication_building_994_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.498,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,489609}, tap_estimate, {replica_building,"default",994,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20961.0>, <<"replication_building_994_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.499,ns_1@10.242.238.88:<0.30521.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.20961.0>}, {'ns_1@10.242.238.90',<18125.20338.0>}]) [rebalance:info,2014-08-19T16:49:51.499,ns_1@10.242.238.88:<0.30512.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:51.500,ns_1@10.242.238.88:<0.30512.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 994 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.500,ns_1@10.242.238.88:<0.30512.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.501,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:51.505,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
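
Each "noted backfill done" entry above is immediately followed by a "Got actions: [{move,...}]" entry and a new "Spawned single vbucket mover", i.e. the parent mover starts the next vbucket for this node as soon as the previous one's backfill completes, while the earlier movers keep waiting on their replication checkpoints. A minimal sketch of that scheduling pattern, again with assumed names rather than the real ns_vbucket_mover module:

%% Hypothetical sketch of the scheduling pattern visible in this section.
-module(vbucket_mover_loop_sketch).
-export([run/2]).

%% Actions are {move, {VBucket, OldChain, NewChain}} tuples as printed by the
%% "Got actions: ..." entries. MoverFun is expected to run one vbucket move and
%% send {backfill_done, VBucket} back to this process once backfill is done;
%% the checkpoint wait may still be in flight when that message arrives.
run([], _MoverFun) ->
    done;
run([{move, {VBucket, _OldChain, _NewChain}} = Action | Rest], MoverFun) ->
    Parent = self(),
    _MoverPid = spawn_link(fun() -> MoverFun(Parent, Action) end),
    receive
        {backfill_done, VBucket} ->
            %% mirrors "noted backfill done: {move,{...}}" followed by
            %% "Got actions: [...]" for the next vbucket
            run(Rest, MoverFun)
    end.

With a MoverFun that performs something like vbucket_move_sketch:move/4 and then sends {backfill_done, VBucket} to Parent, run/2 reproduces the interleaving seen above: one backfill in flight per source node at a time, with checkpoint waits overlapping across vbuckets.
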
[rebalance:debug,2014-08-19T16:49:51.505,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30533.0>) [ns_server:debug,2014-08-19T16:49:51.505,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 739) [ns_server:debug,2014-08-19T16:49:51.506,ns_1@10.242.238.88:<0.30534.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.506,ns_1@10.242.238.88:<0.30534.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:51.506,ns_1@10.242.238.88:<0.30533.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 739 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.506,ns_1@10.242.238.88:<0.30540.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 739 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [rebalance:info,2014-08-19T16:49:51.506,ns_1@10.242.238.88:<0.30539.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 739 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:49:51.510,ns_1@10.242.238.88:<0.30541.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 739 into 'ns_1@10.242.238.91' is <18126.20967.0> [views:debug,2014-08-19T16:49:51.511,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/901. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.511,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",901,active,0} [ns_server:debug,2014-08-19T16:49:51.512,ns_1@10.242.238.88:<0.30541.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 739 into 'ns_1@10.242.238.90' is <18125.20364.0> [rebalance:debug,2014-08-19T16:49:51.512,ns_1@10.242.238.88:<0.30533.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 739 is <0.30541.0> [ns_server:debug,2014-08-19T16:49:51.551,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,542208}, tap_estimate, {replica_building,"default",739,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20967.0>, <<"replication_building_739_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.567,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,558879}, tap_estimate, {replica_building,"default",739,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20364.0>, <<"replication_building_739_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.568,ns_1@10.242.238.88:<0.30542.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20364.0>}, {'ns_1@10.242.238.91',<18126.20967.0>}]) [rebalance:info,2014-08-19T16:49:51.568,ns_1@10.242.238.88:<0.30533.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:51.569,ns_1@10.242.238.88:<0.30533.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 739 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.569,ns_1@10.242.238.88:<0.30533.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.570,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.574,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.574,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30569.0>) [ns_server:debug,2014-08-19T16:49:51.575,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 483) [ns_server:debug,2014-08-19T16:49:51.575,ns_1@10.242.238.88:<0.30570.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.575,ns_1@10.242.238.88:<0.30570.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:51.576,ns_1@10.242.238.88:<0.30569.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 483 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.576,ns_1@10.242.238.88:<0.30575.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 483 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.576,ns_1@10.242.238.88:<0.30576.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 483 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.579,ns_1@10.242.238.88:<0.30577.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 483 into 'ns_1@10.242.238.91' is <18126.20980.0> [ns_server:debug,2014-08-19T16:49:51.582,ns_1@10.242.238.88:<0.30577.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 483 into 'ns_1@10.242.238.89' is <18124.25842.0> [rebalance:debug,2014-08-19T16:49:51.582,ns_1@10.242.238.88:<0.30569.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 483 is <0.30577.0> [ns_server:debug,2014-08-19T16:49:51.586,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 899. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.586,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/899. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.586,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",899,active,0} [ns_server:debug,2014-08-19T16:49:51.587,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,912,848, 784,482,418,354,290,963,899,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766,702, 638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612, 548,246,182,118,974,910,846,782,480,416,352,288,961,714,650,586,522,220,156, 948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858, 794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402, 338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985, 921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374, 
919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994, 866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,704, 576,210,1015,938,810,444,316,989,678,550,184] [ns_server:debug,2014-08-19T16:49:51.617,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,608522}, tap_estimate, {replica_building,"default",483,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.20980.0>, <<"replication_building_483_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:49:51.620,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/899. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.620,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",899,active,0} [ns_server:debug,2014-08-19T16:49:51.635,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,626031}, tap_estimate, {replica_building,"default",483,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25842.0>, <<"replication_building_483_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:51.635,ns_1@10.242.238.88:<0.30578.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25842.0>}, {'ns_1@10.242.238.91',<18126.20980.0>}]) [rebalance:info,2014-08-19T16:49:51.635,ns_1@10.242.238.88:<0.30569.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:51.636,ns_1@10.242.238.88:<0.30569.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 483 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.636,ns_1@10.242.238.88:<0.30569.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.637,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.641,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:51.641,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30590.0>) [ns_server:debug,2014-08-19T16:49:51.641,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 993) [ns_server:debug,2014-08-19T16:49:51.642,ns_1@10.242.238.88:<0.30591.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.642,ns_1@10.242.238.88:<0.30591.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:51.642,ns_1@10.242.238.88:<0.30590.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 993 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.642,ns_1@10.242.238.88:<0.30596.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 993 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.642,ns_1@10.242.238.88:<0.30597.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 993 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.646,ns_1@10.242.238.88:<0.30598.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 993 into 'ns_1@10.242.238.90' is <18125.20370.0> [ns_server:debug,2014-08-19T16:49:51.649,ns_1@10.242.238.88:<0.30598.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 993 into 'ns_1@10.242.238.91' is <18126.21001.0> [rebalance:debug,2014-08-19T16:49:51.649,ns_1@10.242.238.88:<0.30590.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 993 is <0.30598.0> [ns_server:debug,2014-08-19T16:49:51.685,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,676811}, tap_estimate, {replica_building,"default",993,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20370.0>, <<"replication_building_993_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.702,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,693003}, tap_estimate, {replica_building,"default",993,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21001.0>, <<"replication_building_993_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.702,ns_1@10.242.238.88:<0.30599.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21001.0>}, {'ns_1@10.242.238.90',<18125.20370.0>}]) [rebalance:info,2014-08-19T16:49:51.702,ns_1@10.242.238.88:<0.30590.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:51.703,ns_1@10.242.238.88:<0.30590.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 993 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.703,ns_1@10.242.238.88:<0.30590.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.704,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:51.708,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.708,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",738, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30625.0>) [ns_server:debug,2014-08-19T16:49:51.708,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 738) [ns_server:debug,2014-08-19T16:49:51.709,ns_1@10.242.238.88:<0.30626.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.709,ns_1@10.242.238.88:<0.30626.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:51.709,ns_1@10.242.238.88:<0.30625.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 738 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.709,ns_1@10.242.238.88:<0.30631.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 738 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.709,ns_1@10.242.238.88:<0.30632.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 738 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.713,ns_1@10.242.238.88:<0.30633.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 738 into 'ns_1@10.242.238.91' is <18126.21007.0> [ns_server:debug,2014-08-19T16:49:51.716,ns_1@10.242.238.88:<0.30633.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 738 into 'ns_1@10.242.238.90' is <18125.20389.0> [rebalance:debug,2014-08-19T16:49:51.716,ns_1@10.242.238.88:<0.30625.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 738 is <0.30633.0> [ns_server:debug,2014-08-19T16:49:51.731,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 897. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.731,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/897. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.731,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",897,active,0} [ns_server:debug,2014-08-19T16:49:51.732,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,912,848, 784,482,418,354,290,963,899,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766,702, 638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612, 548,246,182,118,974,910,846,782,480,416,352,288,961,897,714,650,586,522,220, 156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922, 858,794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 704,576,210,1015,938,810,444,316,989,678,550,184] [ns_server:debug,2014-08-19T16:49:51.750,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,741622}, tap_estimate, {replica_building,"default",738,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21007.0>, <<"replication_building_738_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.768,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,759864}, tap_estimate, {replica_building,"default",738,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20389.0>, <<"replication_building_738_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.769,ns_1@10.242.238.88:<0.30634.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20389.0>}, {'ns_1@10.242.238.91',<18126.21007.0>}]) [rebalance:info,2014-08-19T16:49:51.769,ns_1@10.242.238.88:<0.30625.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:51.770,ns_1@10.242.238.88:<0.30625.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 738 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.770,ns_1@10.242.238.88:<0.30625.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.771,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.775,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.775,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30646.0>) [ns_server:debug,2014-08-19T16:49:51.776,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 482) [ns_server:debug,2014-08-19T16:49:51.776,ns_1@10.242.238.88:<0.30647.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.776,ns_1@10.242.238.88:<0.30647.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:51.776,ns_1@10.242.238.88:<0.30646.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 482 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.776,ns_1@10.242.238.88:<0.30652.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 482 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.777,ns_1@10.242.238.88:<0.30653.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 482 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.781,ns_1@10.242.238.88:<0.30654.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 482 into 'ns_1@10.242.238.91' is <18126.21012.0> [ns_server:debug,2014-08-19T16:49:51.783,ns_1@10.242.238.88:<0.30654.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 482 into 'ns_1@10.242.238.89' is <18124.25848.0> [rebalance:debug,2014-08-19T16:49:51.783,ns_1@10.242.238.88:<0.30646.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 482 is <0.30654.0> [views:debug,2014-08-19T16:49:51.790,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/897. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.790,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",897,active,0} [ns_server:debug,2014-08-19T16:49:51.818,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,809523}, tap_estimate, {replica_building,"default",482,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21012.0>, <<"replication_building_482_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.836,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,827763}, tap_estimate, {replica_building,"default",482,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25848.0>, <<"replication_building_482_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:51.837,ns_1@10.242.238.88:<0.30655.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25848.0>}, {'ns_1@10.242.238.91',<18126.21012.0>}]) [rebalance:info,2014-08-19T16:49:51.837,ns_1@10.242.238.88:<0.30646.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:51.837,ns_1@10.242.238.88:<0.30646.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 482 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.838,ns_1@10.242.238.88:<0.30646.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.838,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.843,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:51.843,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30667.0>) [ns_server:debug,2014-08-19T16:49:51.843,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 992) [ns_server:debug,2014-08-19T16:49:51.844,ns_1@10.242.238.88:<0.30668.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.844,ns_1@10.242.238.88:<0.30668.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:51.844,ns_1@10.242.238.88:<0.30667.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 992 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.844,ns_1@10.242.238.88:<0.30673.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 992 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.844,ns_1@10.242.238.88:<0.30674.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 992 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.848,ns_1@10.242.238.88:<0.30675.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 992 into 'ns_1@10.242.238.90' is <18125.20395.0> [ns_server:debug,2014-08-19T16:49:51.850,ns_1@10.242.238.88:<0.30675.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 992 into 'ns_1@10.242.238.91' is <18126.21031.0> [rebalance:debug,2014-08-19T16:49:51.850,ns_1@10.242.238.88:<0.30667.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 992 is <0.30675.0> [ns_server:debug,2014-08-19T16:49:51.886,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,877866}, tap_estimate, {replica_building,"default",992,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20395.0>, <<"replication_building_992_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.903,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,894176}, tap_estimate, {replica_building,"default",992,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21031.0>, <<"replication_building_992_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.903,ns_1@10.242.238.88:<0.30676.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21031.0>}, {'ns_1@10.242.238.90',<18125.20395.0>}]) [rebalance:info,2014-08-19T16:49:51.903,ns_1@10.242.238.88:<0.30667.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:51.904,ns_1@10.242.238.88:<0.30667.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 992 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:51.904,ns_1@10.242.238.88:<0.30667.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.905,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:51.909,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.909,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30702.0>) 
[ns_server:debug,2014-08-19T16:49:51.910,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 737) [ns_server:debug,2014-08-19T16:49:51.910,ns_1@10.242.238.88:<0.30703.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.910,ns_1@10.242.238.88:<0.30703.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:51.910,ns_1@10.242.238.88:<0.30702.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 737 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.910,ns_1@10.242.238.88:<0.30708.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 737 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.910,ns_1@10.242.238.88:<0.30709.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 737 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.914,ns_1@10.242.238.88:<0.30710.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 737 into 'ns_1@10.242.238.91' is <18126.21037.0> [ns_server:debug,2014-08-19T16:49:51.916,ns_1@10.242.238.88:<0.30710.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 737 into 'ns_1@10.242.238.90' is <18125.20414.0> [rebalance:debug,2014-08-19T16:49:51.916,ns_1@10.242.238.88:<0.30702.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 737 is <0.30710.0> [ns_server:debug,2014-08-19T16:49:51.952,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,943289}, tap_estimate, {replica_building,"default",737,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21037.0>, <<"replication_building_737_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:51.965,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 895. Nacking mccouch update. [views:debug,2014-08-19T16:49:51.965,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/895. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:51.965,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",895,active,0} [ns_server:debug,2014-08-19T16:49:51.966,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,912,848, 784,482,418,354,290,963,899,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766,702, 638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612, 548,246,182,118,974,910,846,782,480,416,352,288,961,897,714,650,586,522,220, 156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922, 858,794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868, 502,374,919,736,608,242,114,970,842,476,348,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019,942, 814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706,578, 212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,704,576,210,1015,938,810,444,316,989,678,550,184] [ns_server:debug,2014-08-19T16:49:51.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452591,959351}, tap_estimate, {replica_building,"default",737,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20414.0>, <<"replication_building_737_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:51.968,ns_1@10.242.238.88:<0.30711.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20414.0>}, {'ns_1@10.242.238.91',<18126.21037.0>}]) [rebalance:info,2014-08-19T16:49:51.969,ns_1@10.242.238.88:<0.30702.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:51.969,ns_1@10.242.238.88:<0.30702.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 737 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:51.970,ns_1@10.242.238.88:<0.30702.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:51.970,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:51.974,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:51.975,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30723.0>) [ns_server:debug,2014-08-19T16:49:51.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 481) [ns_server:debug,2014-08-19T16:49:51.975,ns_1@10.242.238.88:<0.30724.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:51.975,ns_1@10.242.238.88:<0.30724.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:51.975,ns_1@10.242.238.88:<0.30723.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 481 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:51.976,ns_1@10.242.238.88:<0.30729.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 481 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:51.976,ns_1@10.242.238.88:<0.30730.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 481 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:51.979,ns_1@10.242.238.88:<0.30731.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 481 into 'ns_1@10.242.238.91' is <18126.21056.0> [ns_server:debug,2014-08-19T16:49:51.982,ns_1@10.242.238.88:<0.30731.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 481 into 'ns_1@10.242.238.89' is <18124.25889.0> [rebalance:debug,2014-08-19T16:49:51.982,ns_1@10.242.238.88:<0.30723.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 481 is <0.30731.0> [ns_server:debug,2014-08-19T16:49:52.017,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,8658}, tap_estimate, {replica_building,"default",481,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21056.0>, <<"replication_building_481_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.042,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,33005}, tap_estimate, {replica_building,"default",481,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25889.0>, <<"replication_building_481_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:52.042,ns_1@10.242.238.88:<0.30732.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25889.0>}, {'ns_1@10.242.238.91',<18126.21056.0>}]) 
[rebalance:info,2014-08-19T16:49:52.042,ns_1@10.242.238.88:<0.30723.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:52.043,ns_1@10.242.238.88:<0.30723.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 481 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.043,ns_1@10.242.238.88:<0.30723.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.044,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.048,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:52.048,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30744.0>) [ns_server:debug,2014-08-19T16:49:52.048,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 991) [ns_server:debug,2014-08-19T16:49:52.049,ns_1@10.242.238.88:<0.30745.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.049,ns_1@10.242.238.88:<0.30745.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [views:debug,2014-08-19T16:49:52.049,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/895. 
Updated state: active (0) [rebalance:info,2014-08-19T16:49:52.049,ns_1@10.242.238.88:<0.30744.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 991 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [ns_server:debug,2014-08-19T16:49:52.049,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",895,active,0} [rebalance:info,2014-08-19T16:49:52.049,ns_1@10.242.238.88:<0.30750.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 991 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.049,ns_1@10.242.238.88:<0.30751.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 991 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.053,ns_1@10.242.238.88:<0.30752.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 991 into 'ns_1@10.242.238.90' is <18125.20434.0> [ns_server:debug,2014-08-19T16:49:52.055,ns_1@10.242.238.88:<0.30752.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 991 into 'ns_1@10.242.238.91' is <18126.21061.0> [rebalance:debug,2014-08-19T16:49:52.055,ns_1@10.242.238.88:<0.30744.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 991 is <0.30752.0> [ns_server:debug,2014-08-19T16:49:52.091,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,82487}, tap_estimate, {replica_building,"default",991,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20434.0>, <<"replication_building_991_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.108,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,99727}, tap_estimate, {replica_building,"default",991,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21061.0>, <<"replication_building_991_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.109,ns_1@10.242.238.88:<0.30753.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21061.0>}, {'ns_1@10.242.238.90',<18125.20434.0>}]) [rebalance:info,2014-08-19T16:49:52.109,ns_1@10.242.238.88:<0.30744.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:52.110,ns_1@10.242.238.88:<0.30744.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 991 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.111,ns_1@10.242.238.88:<0.30744.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.111,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:52.116,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.116,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",736, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30779.0>) [ns_server:debug,2014-08-19T16:49:52.116,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 736) [ns_server:debug,2014-08-19T16:49:52.116,ns_1@10.242.238.88:<0.30780.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.117,ns_1@10.242.238.88:<0.30780.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:52.117,ns_1@10.242.238.88:<0.30779.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 736 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.117,ns_1@10.242.238.88:<0.30785.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 736 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.117,ns_1@10.242.238.88:<0.30786.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 736 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.122,ns_1@10.242.238.88:<0.30787.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 736 into 'ns_1@10.242.238.91' is <18126.21081.0> [ns_server:debug,2014-08-19T16:49:52.125,ns_1@10.242.238.88:<0.30787.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 736 into 'ns_1@10.242.238.90' is <18125.20453.0> [rebalance:debug,2014-08-19T16:49:52.126,ns_1@10.242.238.88:<0.30779.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 736 is <0.30787.0> [ns_server:debug,2014-08-19T16:49:52.165,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,156112}, tap_estimate, {replica_building,"default",736,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21081.0>, <<"replication_building_736_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.178,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,169023}, tap_estimate, {replica_building,"default",736,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20453.0>, <<"replication_building_736_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.178,ns_1@10.242.238.88:<0.30788.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20453.0>}, {'ns_1@10.242.238.91',<18126.21081.0>}]) [rebalance:info,2014-08-19T16:49:52.178,ns_1@10.242.238.88:<0.30779.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:52.179,ns_1@10.242.238.88:<0.30779.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 736 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.179,ns_1@10.242.238.88:<0.30779.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.180,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:52.184,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.184,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30800.0>) [ns_server:debug,2014-08-19T16:49:52.184,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 480) [ns_server:debug,2014-08-19T16:49:52.185,ns_1@10.242.238.88:<0.30801.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.185,ns_1@10.242.238.88:<0.30801.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:52.185,ns_1@10.242.238.88:<0.30800.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 480 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.185,ns_1@10.242.238.88:<0.30806.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 480 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.185,ns_1@10.242.238.88:<0.30807.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 480 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.191,ns_1@10.242.238.88:<0.30808.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 480 into 'ns_1@10.242.238.91' is <18126.21100.0> [ns_server:debug,2014-08-19T16:49:52.194,ns_1@10.242.238.88:<0.30808.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 480 into 'ns_1@10.242.238.89' is <18124.25909.0> [rebalance:debug,2014-08-19T16:49:52.194,ns_1@10.242.238.88:<0.30800.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 480 is <0.30808.0> [ns_server:debug,2014-08-19T16:49:52.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 893. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.199,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/893. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",893,active,0} [ns_server:debug,2014-08-19T16:49:52.201,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,912,848, 784,482,418,354,290,963,899,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766,702, 638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612, 548,246,182,118,974,910,846,782,480,416,352,288,961,897,714,650,586,522,220, 156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922, 858,794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868, 502,374,919,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450, 322,995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760, 632,138,994,866,500,372,917,734,606,240,112,968,840,474,346,708,580,214,1019, 942,814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268, 1018,941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,706, 578,212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,704,576,210,1015,938,810,444,316,989,678,550,184] [ns_server:debug,2014-08-19T16:49:52.231,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,222546}, tap_estimate, {replica_building,"default",480,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21100.0>, <<"replication_building_480_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.247,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,238917}, tap_estimate, {replica_building,"default",480,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25909.0>, <<"replication_building_480_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:52.248,ns_1@10.242.238.88:<0.30809.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25909.0>}, {'ns_1@10.242.238.91',<18126.21100.0>}]) [rebalance:info,2014-08-19T16:49:52.248,ns_1@10.242.238.88:<0.30800.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:52.249,ns_1@10.242.238.88:<0.30800.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 480 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.249,ns_1@10.242.238.88:<0.30800.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.250,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.254,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:52.254,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30821.0>) [ns_server:debug,2014-08-19T16:49:52.254,ns_1@10.242.238.88:<0.30822.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.255,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 990) [ns_server:debug,2014-08-19T16:49:52.255,ns_1@10.242.238.88:<0.30822.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:52.255,ns_1@10.242.238.88:<0.30821.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 990 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.255,ns_1@10.242.238.88:<0.30827.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 990 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.255,ns_1@10.242.238.88:<0.30828.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 990 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.259,ns_1@10.242.238.88:<0.30829.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 990 into 'ns_1@10.242.238.90' is <18125.20473.0> [ns_server:debug,2014-08-19T16:49:52.261,ns_1@10.242.238.88:<0.30829.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 990 into 'ns_1@10.242.238.91' is <18126.21105.0> [rebalance:debug,2014-08-19T16:49:52.261,ns_1@10.242.238.88:<0.30821.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 990 is <0.30829.0> [views:debug,2014-08-19T16:49:52.283,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/893. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.283,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",893,active,0} [ns_server:debug,2014-08-19T16:49:52.297,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,287993}, tap_estimate, {replica_building,"default",990,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20473.0>, <<"replication_building_990_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.313,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,304691}, tap_estimate, {replica_building,"default",990,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21105.0>, <<"replication_building_990_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.314,ns_1@10.242.238.88:<0.30830.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21105.0>}, {'ns_1@10.242.238.90',<18125.20473.0>}]) [rebalance:info,2014-08-19T16:49:52.314,ns_1@10.242.238.88:<0.30821.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:52.314,ns_1@10.242.238.88:<0.30821.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 990 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.315,ns_1@10.242.238.88:<0.30821.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.315,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:52.320,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.320,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30842.0>) [ns_server:debug,2014-08-19T16:49:52.320,ns_1@10.242.238.88:<0.30843.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.320,ns_1@10.242.238.88:<0.30843.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:52.320,ns_1@10.242.238.88:<0.30842.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 735 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.321,ns_1@10.242.238.88:<0.30848.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 735 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.321,ns_1@10.242.238.88:<0.30849.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 735 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.326,ns_1@10.242.238.88:<0.30850.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 735 into 'ns_1@10.242.238.91' is <18126.21125.0> [ns_server:debug,2014-08-19T16:49:52.328,ns_1@10.242.238.88:<0.30850.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 735 into 'ns_1@10.242.238.90' is <18125.20478.0> [rebalance:debug,2014-08-19T16:49:52.328,ns_1@10.242.238.88:<0.30842.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 735 is <0.30850.0> [ns_server:debug,2014-08-19T16:49:52.329,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 735) [ns_server:debug,2014-08-19T16:49:52.365,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,355966}, tap_estimate, {replica_building,"default",735,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21125.0>, <<"replication_building_735_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.381,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,372463}, tap_estimate, {replica_building,"default",735,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20478.0>, <<"replication_building_735_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.381,ns_1@10.242.238.88:<0.30851.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20478.0>}, {'ns_1@10.242.238.91',<18126.21125.0>}]) [rebalance:info,2014-08-19T16:49:52.382,ns_1@10.242.238.88:<0.30842.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:52.382,ns_1@10.242.238.88:<0.30842.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 735 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.383,ns_1@10.242.238.88:<0.30842.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.383,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.387,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.388,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",479, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30877.0>) [ns_server:debug,2014-08-19T16:49:52.388,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 479) [ns_server:debug,2014-08-19T16:49:52.388,ns_1@10.242.238.88:<0.30878.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.388,ns_1@10.242.238.88:<0.30878.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:52.388,ns_1@10.242.238.88:<0.30877.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 479 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.389,ns_1@10.242.238.88:<0.30883.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 479 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.389,ns_1@10.242.238.88:<0.30884.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 479 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.393,ns_1@10.242.238.88:<0.30885.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 479 into 'ns_1@10.242.238.91' is <18126.21130.0> [ns_server:debug,2014-08-19T16:49:52.395,ns_1@10.242.238.88:<0.30885.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 479 into 'ns_1@10.242.238.89' is <18124.25929.0> [rebalance:debug,2014-08-19T16:49:52.395,ns_1@10.242.238.88:<0.30877.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 479 is <0.30885.0> [ns_server:debug,2014-08-19T16:49:52.430,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,421756}, tap_estimate, {replica_building,"default",479,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21130.0>, <<"replication_building_479_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.433,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 891. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.433,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/891. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.433,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",891,active,0} [ns_server:debug,2014-08-19T16:49:52.434,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,912,848, 784,482,418,354,290,963,899,716,652,588,524,222,158,950,886,822,456,392,328, 264,1014,937,754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302, 975,911,728,664,600,536,234,170,962,898,834,770,468,404,340,276,949,766,702, 638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612, 548,246,182,118,974,910,846,782,480,416,352,288,961,897,714,650,586,522,220, 156,948,884,820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922, 858,794,492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558, 192,920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868, 502,374,919,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450, 322,995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760, 632,138,994,866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214, 1019,942,814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396, 268,1018,941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344, 706,578,212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526, 160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,704,576,210,1015,938,810,444,316,989,678,550,184] [ns_server:debug,2014-08-19T16:49:52.447,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,438431}, tap_estimate, {replica_building,"default",479,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25929.0>, <<"replication_building_479_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:52.447,ns_1@10.242.238.88:<0.30886.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25929.0>}, {'ns_1@10.242.238.91',<18126.21130.0>}]) [rebalance:info,2014-08-19T16:49:52.448,ns_1@10.242.238.88:<0.30877.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:52.448,ns_1@10.242.238.88:<0.30877.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 479 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:52.449,ns_1@10.242.238.88:<0.30877.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.449,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.453,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:52.454,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30898.0>) [ns_server:debug,2014-08-19T16:49:52.454,ns_1@10.242.238.88:<0.30899.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.454,ns_1@10.242.238.88:<0.30899.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:52.454,ns_1@10.242.238.88:<0.30898.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 989 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.455,ns_1@10.242.238.88:<0.30904.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 989 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.455,ns_1@10.242.238.88:<0.30905.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 989 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.455,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 989) [ns_server:debug,2014-08-19T16:49:52.458,ns_1@10.242.238.88:<0.30906.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 989 into 'ns_1@10.242.238.90' is <18125.20498.0> [ns_server:debug,2014-08-19T16:49:52.461,ns_1@10.242.238.88:<0.30906.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 989 into 'ns_1@10.242.238.91' is <18126.21149.0> [rebalance:debug,2014-08-19T16:49:52.461,ns_1@10.242.238.88:<0.30898.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 989 is <0.30906.0> [ns_server:debug,2014-08-19T16:49:52.496,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,487482}, tap_estimate, {replica_building,"default",989,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20498.0>, <<"replication_building_989_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.514,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,505221}, tap_estimate, {replica_building,"default",989,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21149.0>, <<"replication_building_989_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.514,ns_1@10.242.238.88:<0.30907.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21149.0>}, {'ns_1@10.242.238.90',<18125.20498.0>}]) 
[rebalance:info,2014-08-19T16:49:52.514,ns_1@10.242.238.88:<0.30898.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:52.515,ns_1@10.242.238.88:<0.30898.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 989 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.515,ns_1@10.242.238.88:<0.30898.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.516,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [views:debug,2014-08-19T16:49:52.517,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/891. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.518,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",891,active,0} [ns_server:debug,2014-08-19T16:49:52.521,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.521,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30919.0>) [ns_server:debug,2014-08-19T16:49:52.521,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 734) [ns_server:debug,2014-08-19T16:49:52.521,ns_1@10.242.238.88:<0.30920.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.521,ns_1@10.242.238.88:<0.30920.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:52.522,ns_1@10.242.238.88:<0.30919.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 734 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.522,ns_1@10.242.238.88:<0.30925.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 734 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.522,ns_1@10.242.238.88:<0.30926.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 734 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.526,ns_1@10.242.238.88:<0.30927.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 734 into 'ns_1@10.242.238.91' is <18126.21155.0> [ns_server:debug,2014-08-19T16:49:52.528,ns_1@10.242.238.88:<0.30927.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 734 into 'ns_1@10.242.238.90' is <18125.20503.0> [rebalance:debug,2014-08-19T16:49:52.528,ns_1@10.242.238.88:<0.30919.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 734 is <0.30927.0> [ns_server:debug,2014-08-19T16:49:52.567,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,558523}, tap_estimate, {replica_building,"default",734,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21155.0>, <<"replication_building_734_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.580,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,571901}, tap_estimate, {replica_building,"default",734,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20503.0>, <<"replication_building_734_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.581,ns_1@10.242.238.88:<0.30928.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20503.0>}, {'ns_1@10.242.238.91',<18126.21155.0>}]) [rebalance:info,2014-08-19T16:49:52.581,ns_1@10.242.238.88:<0.30919.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:52.582,ns_1@10.242.238.88:<0.30919.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 734 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.582,ns_1@10.242.238.88:<0.30919.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.583,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.587,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.587,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.30940.0>) 
[ns_server:debug,2014-08-19T16:49:52.588,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 478) [ns_server:debug,2014-08-19T16:49:52.588,ns_1@10.242.238.88:<0.30941.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.588,ns_1@10.242.238.88:<0.30941.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:52.588,ns_1@10.242.238.88:<0.30940.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 478 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.588,ns_1@10.242.238.88:<0.30946.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 478 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.588,ns_1@10.242.238.88:<0.30947.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 478 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.592,ns_1@10.242.238.88:<0.30948.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 478 into 'ns_1@10.242.238.91' is <18126.21166.0> [ns_server:debug,2014-08-19T16:49:52.595,ns_1@10.242.238.88:<0.30948.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 478 into 'ns_1@10.242.238.89' is <18124.25949.0> [rebalance:debug,2014-08-19T16:49:52.595,ns_1@10.242.238.88:<0.30940.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 478 is <0.30948.0> [ns_server:debug,2014-08-19T16:49:52.631,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,622589}, tap_estimate, {replica_building,"default",478,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21166.0>, <<"replication_building_478_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.647,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,638909}, tap_estimate, {replica_building,"default",478,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25949.0>, <<"replication_building_478_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:52.648,ns_1@10.242.238.88:<0.30957.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25949.0>}, {'ns_1@10.242.238.91',<18126.21166.0>}]) [rebalance:info,2014-08-19T16:49:52.648,ns_1@10.242.238.88:<0.30940.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:52.649,ns_1@10.242.238.88:<0.30940.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 478 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.649,ns_1@10.242.238.88:<0.30940.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.650,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:52.657,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:52.657,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.30975.0>) [ns_server:debug,2014-08-19T16:49:52.657,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 988) [ns_server:debug,2014-08-19T16:49:52.657,ns_1@10.242.238.88:<0.30976.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.658,ns_1@10.242.238.88:<0.30976.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:52.658,ns_1@10.242.238.88:<0.30975.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 988 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.658,ns_1@10.242.238.88:<0.30981.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 988 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.658,ns_1@10.242.238.88:<0.30982.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 988 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.662,ns_1@10.242.238.88:<0.30983.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 988 into 'ns_1@10.242.238.90' is <18125.20523.0> [ns_server:debug,2014-08-19T16:49:52.664,ns_1@10.242.238.88:<0.30983.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 988 into 'ns_1@10.242.238.91' is <18126.21185.0> [rebalance:debug,2014-08-19T16:49:52.664,ns_1@10.242.238.88:<0.30975.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 988 is <0.30983.0> [ns_server:debug,2014-08-19T16:49:52.667,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 889. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.667,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/889. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.667,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",889,active,0} [ns_server:debug,2014-08-19T16:49:52.669,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,848,482, 354,963,899,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,937,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600, 536,234,170,962,898,834,770,468,404,340,276,949,766,702,638,574,208,144,1013, 936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910, 846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492,428,364,300, 973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792,426,298,971, 660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242, 114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684,556,190,918, 790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630,136, 992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,704,576, 210,1015,938,810,444,316,989,678,550,184,912,784,418,290] [ns_server:debug,2014-08-19T16:49:52.699,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,690613}, tap_estimate, {replica_building,"default",988,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20523.0>, <<"replication_building_988_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:49:52.712,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/889. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.712,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",889,active,0} [ns_server:debug,2014-08-19T16:49:52.717,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,708513}, tap_estimate, {replica_building,"default",988,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21185.0>, <<"replication_building_988_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.718,ns_1@10.242.238.88:<0.30984.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21185.0>}, {'ns_1@10.242.238.90',<18125.20523.0>}]) [rebalance:info,2014-08-19T16:49:52.718,ns_1@10.242.238.88:<0.30975.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:52.718,ns_1@10.242.238.88:<0.30975.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 988 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.719,ns_1@10.242.238.88:<0.30975.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.720,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:52.724,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.724,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.30996.0>) [ns_server:debug,2014-08-19T16:49:52.725,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 733) [ns_server:debug,2014-08-19T16:49:52.725,ns_1@10.242.238.88:<0.30997.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.725,ns_1@10.242.238.88:<0.30997.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:52.725,ns_1@10.242.238.88:<0.30996.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 733 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.726,ns_1@10.242.238.88:<0.31002.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 733 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.726,ns_1@10.242.238.88:<0.31003.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 733 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.730,ns_1@10.242.238.88:<0.31004.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 733 into 'ns_1@10.242.238.91' is <18126.21191.0> [ns_server:debug,2014-08-19T16:49:52.731,ns_1@10.242.238.88:<0.31004.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 733 into 'ns_1@10.242.238.90' is <18125.20528.0> [rebalance:debug,2014-08-19T16:49:52.732,ns_1@10.242.238.88:<0.30996.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 733 is <0.31004.0> [ns_server:debug,2014-08-19T16:49:52.767,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,758695}, tap_estimate, {replica_building,"default",733,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21191.0>, <<"replication_building_733_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.784,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,775636}, tap_estimate, {replica_building,"default",733,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20528.0>, <<"replication_building_733_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.784,ns_1@10.242.238.88:<0.31005.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20528.0>}, {'ns_1@10.242.238.91',<18126.21191.0>}]) [rebalance:info,2014-08-19T16:49:52.785,ns_1@10.242.238.88:<0.30996.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:52.785,ns_1@10.242.238.88:<0.30996.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 733 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.786,ns_1@10.242.238.88:<0.30996.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.786,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.790,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.791,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31031.0>) 
[ns_server:debug,2014-08-19T16:49:52.791,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 477) [ns_server:debug,2014-08-19T16:49:52.791,ns_1@10.242.238.88:<0.31032.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.791,ns_1@10.242.238.88:<0.31032.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:52.791,ns_1@10.242.238.88:<0.31031.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 477 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.792,ns_1@10.242.238.88:<0.31037.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 477 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.792,ns_1@10.242.238.88:<0.31038.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 477 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.795,ns_1@10.242.238.88:<0.31039.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 477 into 'ns_1@10.242.238.91' is <18126.21196.0> [ns_server:debug,2014-08-19T16:49:52.797,ns_1@10.242.238.88:<0.31039.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 477 into 'ns_1@10.242.238.89' is <18124.25969.0> [rebalance:debug,2014-08-19T16:49:52.797,ns_1@10.242.238.88:<0.31031.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 477 is <0.31039.0> [ns_server:debug,2014-08-19T16:49:52.834,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,825610}, tap_estimate, {replica_building,"default",477,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21196.0>, <<"replication_building_477_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.845,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 887. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.845,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/887. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.845,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",887,active,0} [ns_server:debug,2014-08-19T16:49:52.847,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,848,482, 354,963,899,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,937,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600, 536,234,170,962,898,834,770,468,404,340,276,949,766,702,638,574,208,144,1013, 936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910, 846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492,428,364,300, 973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792,426,298,971, 660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242, 114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684,556,190,918, 790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630,136, 992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290] [ns_server:debug,2014-08-19T16:49:52.851,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,842044}, tap_estimate, {replica_building,"default",477,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25969.0>, <<"replication_building_477_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:52.851,ns_1@10.242.238.88:<0.31040.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25969.0>}, {'ns_1@10.242.238.91',<18126.21196.0>}]) [rebalance:info,2014-08-19T16:49:52.851,ns_1@10.242.238.88:<0.31031.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:52.852,ns_1@10.242.238.88:<0.31031.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 477 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:52.852,ns_1@10.242.238.88:<0.31031.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.853,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.857,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:52.857,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31052.0>) [ns_server:debug,2014-08-19T16:49:52.857,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 987) [ns_server:debug,2014-08-19T16:49:52.858,ns_1@10.242.238.88:<0.31053.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.858,ns_1@10.242.238.88:<0.31053.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:52.858,ns_1@10.242.238.88:<0.31052.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 987 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.858,ns_1@10.242.238.88:<0.31058.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 987 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.858,ns_1@10.242.238.88:<0.31059.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 987 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.862,ns_1@10.242.238.88:<0.31060.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 987 into 'ns_1@10.242.238.90' is <18125.20549.0> [ns_server:debug,2014-08-19T16:49:52.865,ns_1@10.242.238.88:<0.31060.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 987 into 'ns_1@10.242.238.91' is <18126.21215.0> [rebalance:debug,2014-08-19T16:49:52.865,ns_1@10.242.238.88:<0.31052.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 987 is <0.31060.0> [views:debug,2014-08-19T16:49:52.879,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/887. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.880,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",887,active,0} [ns_server:debug,2014-08-19T16:49:52.900,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,891916}, tap_estimate, {replica_building,"default",987,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20549.0>, <<"replication_building_987_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.919,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,910388}, tap_estimate, {replica_building,"default",987,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21215.0>, <<"replication_building_987_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.919,ns_1@10.242.238.88:<0.31061.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21215.0>}, {'ns_1@10.242.238.90',<18125.20549.0>}]) [rebalance:info,2014-08-19T16:49:52.920,ns_1@10.242.238.88:<0.31052.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:52.920,ns_1@10.242.238.88:<0.31052.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 987 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.921,ns_1@10.242.238.88:<0.31052.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.921,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:52.925,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.926,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.31087.0>) [ns_server:debug,2014-08-19T16:49:52.926,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 732) [ns_server:debug,2014-08-19T16:49:52.926,ns_1@10.242.238.88:<0.31088.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.926,ns_1@10.242.238.88:<0.31088.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:52.927,ns_1@10.242.238.88:<0.31087.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 732 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.927,ns_1@10.242.238.88:<0.31093.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 732 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.927,ns_1@10.242.238.88:<0.31094.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 732 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:52.931,ns_1@10.242.238.88:<0.31095.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 732 into 'ns_1@10.242.238.91' is <18126.21221.0> [ns_server:debug,2014-08-19T16:49:52.933,ns_1@10.242.238.88:<0.31095.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 732 into 'ns_1@10.242.238.90' is <18125.20554.0> [rebalance:debug,2014-08-19T16:49:52.933,ns_1@10.242.238.88:<0.31087.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 732 is <0.31095.0> [ns_server:debug,2014-08-19T16:49:52.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_987_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_987_'ns_1@10.242.238.90'">>}]}, {move_state,477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_477_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_477_'ns_1@10.242.238.91'">>}]}, {move_state,733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_733_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_733_'ns_1@10.242.238.91'">>}]}, {move_state,988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_988_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_988_'ns_1@10.242.238.90'">>}]}, {move_state,478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_478_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_478_'ns_1@10.242.238.91'">>}]}, {move_state,734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_734_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_734_'ns_1@10.242.238.91'">>}]}, {move_state,989, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_989_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_989_'ns_1@10.242.238.90'">>}]}, {move_state,479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_479_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_479_'ns_1@10.242.238.91'">>}]}, {move_state,735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_735_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_735_'ns_1@10.242.238.91'">>}]}, {move_state,990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_990_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_990_'ns_1@10.242.238.90'">>}]}, {move_state,480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_480_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_480_'ns_1@10.242.238.91'">>}]}, {move_state,736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_736_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_736_'ns_1@10.242.238.91'">>}]}, {move_state,991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_991_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_991_'ns_1@10.242.238.90'">>}]}, {move_state,481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_481_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_481_'ns_1@10.242.238.91'">>}]}, {move_state,737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_737_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_737_'ns_1@10.242.238.91'">>}]}, {move_state,992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_992_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_992_'ns_1@10.242.238.90'">>}]}, {move_state,482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_482_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_482_'ns_1@10.242.238.91'">>}]}, {move_state,738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_738_'ns_1@10.242.238.90'">>}, 
{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_738_'ns_1@10.242.238.91'">>}]}, {move_state,993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_993_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_993_'ns_1@10.242.238.90'">>}]}, {move_state,483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_483_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_483_'ns_1@10.242.238.91'">>}]}, {move_state,739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_739_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_739_'ns_1@10.242.238.91'">>}]}, {move_state,994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_994_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_994_'ns_1@10.242.238.90'">>}]}, {move_state,484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_484_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_484_'ns_1@10.242.238.91'">>}]}, {move_state,740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_740_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_740_'ns_1@10.242.238.91'">>}]}, {move_state,995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_995_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_995_'ns_1@10.242.238.90'">>}]}, {move_state,485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_485_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_485_'ns_1@10.242.238.91'">>}]}, {move_state,741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_741_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_741_'ns_1@10.242.238.91'">>}]}, {move_state,996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_996_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_996_'ns_1@10.242.238.90'">>}]}, {move_state,486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_486_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_486_'ns_1@10.242.238.91'">>}]}, {move_state,742, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_742_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_742_'ns_1@10.242.238.91'">>}]}, {move_state,997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_997_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_997_'ns_1@10.242.238.90'">>}]}, {move_state,487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_487_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_487_'ns_1@10.242.238.91'">>}]}, {move_state,743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_743_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_743_'ns_1@10.242.238.91'">>}]}, {move_state,998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_998_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_998_'ns_1@10.242.238.90'">>}]}, {move_state,488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_488_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_488_'ns_1@10.242.238.91'">>}]}, {move_state,744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_744_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_744_'ns_1@10.242.238.91'">>}]}, {move_state,999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_999_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_999_'ns_1@10.242.238.90'">>}]}, {move_state,489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_489_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_489_'ns_1@10.242.238.91'">>}]}, {move_state,745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_745_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_745_'ns_1@10.242.238.91'">>}]}, {move_state,1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1000_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1000_'ns_1@10.242.238.90'">>}]}, {move_state,490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_490_'ns_1@10.242.238.89'">>}, 
{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_490_'ns_1@10.242.238.91'">>}]}, {move_state,746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_746_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_746_'ns_1@10.242.238.91'">>}]}, {move_state,1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1001_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1001_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:49:52.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 987, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 477, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.954,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 885. Nacking mccouch update. [views:debug,2014-08-19T16:49:52.955,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/885. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:52.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 733, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.955,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",885,active,0} [ns_server:debug,2014-08-19T16:49:52.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 988, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 478, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.956,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,848,482, 354,963,899,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,937,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600, 536,234,170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144, 1013,936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118, 974,910,846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884,820, 454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492,428, 
364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947, 764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674, 610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520, 218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792,426, 298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736, 608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290] [ns_server:debug,2014-08-19T16:49:52.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 734, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 989, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 479, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 735, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 990, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 480, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 736, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 991, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 481, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 737, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 992, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 482, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 738, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 993, 
[{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 483, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 739, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 994, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 484, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 740, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 995, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 485, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 741, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 996, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 486, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,959424}, tap_estimate, {replica_building,"default",732,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21221.0>, <<"replication_building_732_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:52.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 742, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 997, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 487, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 743, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 998, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 488, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 744, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:52.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 999, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 489, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 745, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1000, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 490, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 746, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:52.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1001, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:52.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452592,982059}, tap_estimate, {replica_building,"default",732,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20554.0>, <<"replication_building_732_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:52.991,ns_1@10.242.238.88:<0.31096.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20554.0>}, {'ns_1@10.242.238.91',<18126.21221.0>}]) [rebalance:info,2014-08-19T16:49:52.991,ns_1@10.242.238.88:<0.31087.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:52.992,ns_1@10.242.238.88:<0.31087.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 732 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:52.992,ns_1@10.242.238.88:<0.31087.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:52.993,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:52.997,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:52.997,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31152.0>) [ns_server:debug,2014-08-19T16:49:52.998,ns_1@10.242.238.88:<0.31153.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:52.998,ns_1@10.242.238.88:<0.31153.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:52.998,ns_1@10.242.238.88:<0.31152.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 476 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:52.998,ns_1@10.242.238.88:<0.31158.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 476 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:52.998,ns_1@10.242.238.88:<0.31159.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 476 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.002,ns_1@10.242.238.88:<0.31160.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 476 into 'ns_1@10.242.238.91' is <18126.21226.0> [ns_server:debug,2014-08-19T16:49:53.004,ns_1@10.242.238.88:<0.31160.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 476 into 'ns_1@10.242.238.89' is <18124.25989.0> [rebalance:debug,2014-08-19T16:49:53.004,ns_1@10.242.238.88:<0.31152.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 476 is <0.31160.0> [ns_server:debug,2014-08-19T16:49:53.004,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 476) [views:debug,2014-08-19T16:49:53.013,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/885. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.013,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",885,active,0} [ns_server:debug,2014-08-19T16:49:53.040,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,31255}, tap_estimate, {replica_building,"default",476,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21226.0>, <<"replication_building_476_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.064,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,55750}, tap_estimate, {replica_building,"default",476,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.25989.0>, <<"replication_building_476_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:53.065,ns_1@10.242.238.88:<0.31161.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.25989.0>}, {'ns_1@10.242.238.91',<18126.21226.0>}]) [rebalance:info,2014-08-19T16:49:53.065,ns_1@10.242.238.88:<0.31152.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:53.066,ns_1@10.242.238.88:<0.31152.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 476 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.066,ns_1@10.242.238.88:<0.31152.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.067,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:53.071,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:53.071,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31187.0>) [ns_server:debug,2014-08-19T16:49:53.072,ns_1@10.242.238.88:<0.31188.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.072,ns_1@10.242.238.88:<0.31188.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [ns_server:debug,2014-08-19T16:49:53.072,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 986) [rebalance:info,2014-08-19T16:49:53.072,ns_1@10.242.238.88:<0.31187.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 986 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.073,ns_1@10.242.238.88:<0.31193.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 986 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.073,ns_1@10.242.238.88:<0.31194.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 986 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.077,ns_1@10.242.238.88:<0.31195.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 986 into 'ns_1@10.242.238.90' is <18125.20574.0> [ns_server:debug,2014-08-19T16:49:53.078,ns_1@10.242.238.88:<0.31195.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 986 into 'ns_1@10.242.238.91' is <18126.21245.0> [rebalance:debug,2014-08-19T16:49:53.078,ns_1@10.242.238.88:<0.31187.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 986 is <0.31195.0> [ns_server:debug,2014-08-19T16:49:53.114,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 883. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.114,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/883. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.114,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",883,active,0} [ns_server:debug,2014-08-19T16:49:53.116,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,107544}, tap_estimate, {replica_building,"default",986,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20574.0>, <<"replication_building_986_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.116,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,698,570,204, 1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880,386, 258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462,334, 696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644,516, 150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226,954, 826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408,280, 953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901,718, 590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172,900, 772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,848,482, 354,963,899,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,937,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600, 536,234,170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144, 1013,936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118, 974,910,846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884,820, 454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492,428, 364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947, 883,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738, 674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584, 520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792, 426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919, 736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994, 866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290] [ns_server:debug,2014-08-19T16:49:53.133,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,124548}, tap_estimate, {replica_building,"default",986,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21245.0>, <<"replication_building_986_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.134,ns_1@10.242.238.88:<0.31196.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21245.0>}, {'ns_1@10.242.238.90',<18125.20574.0>}]) [rebalance:info,2014-08-19T16:49:53.134,ns_1@10.242.238.88:<0.31187.0>:janitor_agent:initiate_indexing:552]default: Doing 
initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:53.134,ns_1@10.242.238.88:<0.31187.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 986 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.135,ns_1@10.242.238.88:<0.31187.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.135,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:53.140,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.140,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.31208.0>) [ns_server:debug,2014-08-19T16:49:53.140,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 731) [ns_server:debug,2014-08-19T16:49:53.141,ns_1@10.242.238.88:<0.31209.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.141,ns_1@10.242.238.88:<0.31209.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:53.141,ns_1@10.242.238.88:<0.31208.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 731 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.141,ns_1@10.242.238.88:<0.31214.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 731 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.141,ns_1@10.242.238.88:<0.31215.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 731 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.146,ns_1@10.242.238.88:<0.31216.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 731 into 'ns_1@10.242.238.91' is <18126.21251.0> [ns_server:debug,2014-08-19T16:49:53.148,ns_1@10.242.238.88:<0.31216.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 731 into 'ns_1@10.242.238.90' is <18125.20579.0> [rebalance:debug,2014-08-19T16:49:53.149,ns_1@10.242.238.88:<0.31208.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 731 is <0.31216.0> [views:debug,2014-08-19T16:49:53.181,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/883. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.181,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",883,active,0} [ns_server:debug,2014-08-19T16:49:53.191,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,182679}, tap_estimate, {replica_building,"default",731,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21251.0>, <<"replication_building_731_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.205,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,196146}, tap_estimate, {replica_building,"default",731,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20579.0>, <<"replication_building_731_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.205,ns_1@10.242.238.88:<0.31217.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20579.0>}, {'ns_1@10.242.238.91',<18126.21251.0>}]) [rebalance:info,2014-08-19T16:49:53.205,ns_1@10.242.238.88:<0.31208.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:53.206,ns_1@10.242.238.88:<0.31208.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 731 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.207,ns_1@10.242.238.88:<0.31208.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.207,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:53.211,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.212,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31234.0>) [ns_server:debug,2014-08-19T16:49:53.212,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 475) [ns_server:debug,2014-08-19T16:49:53.212,ns_1@10.242.238.88:<0.31235.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.212,ns_1@10.242.238.88:<0.31235.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:53.213,ns_1@10.242.238.88:<0.31234.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 475 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.213,ns_1@10.242.238.88:<0.31240.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 475 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.213,ns_1@10.242.238.88:<0.31241.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 475 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.216,ns_1@10.242.238.88:<0.31242.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 475 into 'ns_1@10.242.238.91' is <18126.21256.0> [ns_server:debug,2014-08-19T16:49:53.219,ns_1@10.242.238.88:<0.31242.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 475 into 'ns_1@10.242.238.89' is <18124.26009.0> [rebalance:debug,2014-08-19T16:49:53.219,ns_1@10.242.238.88:<0.31234.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 475 is <0.31242.0> [ns_server:debug,2014-08-19T16:49:53.254,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,245382}, tap_estimate, {replica_building,"default",475,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21256.0>, <<"replication_building_475_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.272,ns_1@10.242.238.88:<0.31243.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26009.0>}, {'ns_1@10.242.238.91',<18126.21256.0>}]) [rebalance:info,2014-08-19T16:49:53.273,ns_1@10.242.238.88:<0.31234.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:53.273,ns_1@10.242.238.88:<0.31234.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 475 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.274,ns_1@10.242.238.88:<0.31234.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.274,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:53.281,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,263688}, tap_estimate, {replica_building,"default",475,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26009.0>, <<"replication_building_475_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:53.282,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:53.282,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31269.0>) 
[ns_server:debug,2014-08-19T16:49:53.282,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 985) [ns_server:debug,2014-08-19T16:49:53.283,ns_1@10.242.238.88:<0.31270.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.283,ns_1@10.242.238.88:<0.31270.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:53.283,ns_1@10.242.238.88:<0.31269.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 985 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.283,ns_1@10.242.238.88:<0.31275.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 985 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.283,ns_1@10.242.238.88:<0.31276.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 985 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.288,ns_1@10.242.238.88:<0.31277.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 985 into 'ns_1@10.242.238.90' is <18125.20599.0> [ns_server:debug,2014-08-19T16:49:53.290,ns_1@10.242.238.88:<0.31277.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 985 into 'ns_1@10.242.238.91' is <18126.21275.0> [rebalance:debug,2014-08-19T16:49:53.290,ns_1@10.242.238.88:<0.31269.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 985 is <0.31277.0> [ns_server:debug,2014-08-19T16:49:53.314,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 881. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.314,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/881. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.315,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",881,active,0} [ns_server:debug,2014-08-19T16:49:53.316,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955,644, 516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592,226, 954,826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774,408, 280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356,901, 718,590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538,172, 900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976,848, 482,354,963,899,716,652,588,524,222,158,950,886,822,456,392,328,264,1014,937, 754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664, 600,536,234,170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182, 118,974,910,846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884, 820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274, 947,883,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921, 738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374, 919,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290] [ns_server:debug,2014-08-19T16:49:53.326,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,317236}, tap_estimate, {replica_building,"default",985,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20599.0>, <<"replication_building_985_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.344,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,335521}, tap_estimate, {replica_building,"default",985,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21275.0>, <<"replication_building_985_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.345,ns_1@10.242.238.88:<0.31278.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21275.0>}, {'ns_1@10.242.238.90',<18125.20599.0>}]) [rebalance:info,2014-08-19T16:49:53.345,ns_1@10.242.238.88:<0.31269.0>:janitor_agent:initiate_indexing:552]default: Doing 
initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:53.345,ns_1@10.242.238.88:<0.31269.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 985 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.346,ns_1@10.242.238.88:<0.31269.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.346,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:53.350,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.350,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.31290.0>) [ns_server:debug,2014-08-19T16:49:53.351,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 730) [ns_server:debug,2014-08-19T16:49:53.351,ns_1@10.242.238.88:<0.31291.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.351,ns_1@10.242.238.88:<0.31291.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:53.351,ns_1@10.242.238.88:<0.31290.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 730 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.351,ns_1@10.242.238.88:<0.31296.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 730 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.351,ns_1@10.242.238.88:<0.31297.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 730 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.356,ns_1@10.242.238.88:<0.31298.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 730 into 'ns_1@10.242.238.91' is <18126.21281.0> [ns_server:debug,2014-08-19T16:49:53.358,ns_1@10.242.238.88:<0.31298.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 730 into 'ns_1@10.242.238.90' is <18125.20604.0> [rebalance:debug,2014-08-19T16:49:53.358,ns_1@10.242.238.88:<0.31290.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 730 is <0.31298.0> [views:debug,2014-08-19T16:49:53.390,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/881. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.391,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",881,active,0} [ns_server:debug,2014-08-19T16:49:53.395,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,386104}, tap_estimate, {replica_building,"default",730,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21281.0>, <<"replication_building_730_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.416,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,407317}, tap_estimate, {replica_building,"default",730,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20604.0>, <<"replication_building_730_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.417,ns_1@10.242.238.88:<0.31299.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20604.0>}, {'ns_1@10.242.238.91',<18126.21281.0>}]) [rebalance:info,2014-08-19T16:49:53.417,ns_1@10.242.238.88:<0.31290.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:53.417,ns_1@10.242.238.88:<0.31290.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 730 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.418,ns_1@10.242.238.88:<0.31290.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.418,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:53.423,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.423,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31311.0>) [ns_server:debug,2014-08-19T16:49:53.423,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 474) [ns_server:debug,2014-08-19T16:49:53.423,ns_1@10.242.238.88:<0.31312.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.423,ns_1@10.242.238.88:<0.31312.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:53.424,ns_1@10.242.238.88:<0.31311.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 474 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.424,ns_1@10.242.238.88:<0.31317.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 474 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.424,ns_1@10.242.238.88:<0.31318.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 474 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.429,ns_1@10.242.238.88:<0.31319.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 474 into 'ns_1@10.242.238.91' is <18126.21286.0> [ns_server:debug,2014-08-19T16:49:53.431,ns_1@10.242.238.88:<0.31319.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 474 into 'ns_1@10.242.238.89' is <18124.26015.0> [rebalance:debug,2014-08-19T16:49:53.431,ns_1@10.242.238.88:<0.31311.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 474 is <0.31319.0> [ns_server:debug,2014-08-19T16:49:53.467,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,458365}, tap_estimate, {replica_building,"default",474,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21286.0>, <<"replication_building_474_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.486,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,477097}, tap_estimate, {replica_building,"default",474,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26015.0>, <<"replication_building_474_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:53.486,ns_1@10.242.238.88:<0.31320.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26015.0>}, {'ns_1@10.242.238.91',<18126.21286.0>}]) [rebalance:info,2014-08-19T16:49:53.486,ns_1@10.242.238.88:<0.31311.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:53.487,ns_1@10.242.238.88:<0.31311.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 474 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.487,ns_1@10.242.238.88:<0.31311.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.488,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:53.492,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:53.492,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31347.0>) [ns_server:debug,2014-08-19T16:49:53.493,ns_1@10.242.238.88:<0.31348.0>:ns_single_vbucket_mover:mover_inner:141]Got nack 
for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.493,ns_1@10.242.238.88:<0.31348.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:53.493,ns_1@10.242.238.88:<0.31347.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 984 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.493,ns_1@10.242.238.88:<0.31353.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 984 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.494,ns_1@10.242.238.88:<0.31354.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 984 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.494,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 879. Nacking mccouch update. [ns_server:debug,2014-08-19T16:49:53.494,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 984) [views:debug,2014-08-19T16:49:53.494,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/879. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.495,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",879,active,0} [ns_server:debug,2014-08-19T16:49:53.496,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,694,566,200,1005,928,800,434,306,979,668,540,174,902,774, 408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484,356, 901,718,590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666,538, 172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120,976, 848,482,354,899,716,588,222,950,886,822,456,392,328,264,1014,937,754,690,626, 562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234, 170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936, 872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910,846, 782,480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326, 262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792,426,298,971, 660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242, 114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684,556,190,918, 790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 
917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630,136, 992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290,963,652,524,158] [ns_server:debug,2014-08-19T16:49:53.498,ns_1@10.242.238.88:<0.31355.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 984 into 'ns_1@10.242.238.90' is <18125.20638.0> [ns_server:debug,2014-08-19T16:49:53.500,ns_1@10.242.238.88:<0.31355.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 984 into 'ns_1@10.242.238.91' is <18126.21305.0> [rebalance:debug,2014-08-19T16:49:53.500,ns_1@10.242.238.88:<0.31347.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 984 is <0.31355.0> [ns_server:debug,2014-08-19T16:49:53.537,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,528259}, tap_estimate, {replica_building,"default",984,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20638.0>, <<"replication_building_984_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.559,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,550333}, tap_estimate, {replica_building,"default",984,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21305.0>, <<"replication_building_984_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.560,ns_1@10.242.238.88:<0.31356.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21305.0>}, {'ns_1@10.242.238.90',<18125.20638.0>}]) [rebalance:info,2014-08-19T16:49:53.560,ns_1@10.242.238.88:<0.31347.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:53.560,ns_1@10.242.238.88:<0.31347.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 984 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.561,ns_1@10.242.238.88:<0.31347.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.561,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:53.566,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.566,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.31368.0>) [ns_server:debug,2014-08-19T16:49:53.566,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 729) [ns_server:debug,2014-08-19T16:49:53.566,ns_1@10.242.238.88:<0.31369.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.566,ns_1@10.242.238.88:<0.31369.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:53.566,ns_1@10.242.238.88:<0.31368.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 729 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.567,ns_1@10.242.238.88:<0.31374.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 729 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.567,ns_1@10.242.238.88:<0.31375.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 729 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:49:53.569,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/879. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.570,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",879,active,0} [ns_server:debug,2014-08-19T16:49:53.570,ns_1@10.242.238.88:<0.31376.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 729 into 'ns_1@10.242.238.91' is <18126.21311.0> [ns_server:debug,2014-08-19T16:49:53.572,ns_1@10.242.238.88:<0.31376.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 729 into 'ns_1@10.242.238.90' is <18125.20645.0> [rebalance:debug,2014-08-19T16:49:53.572,ns_1@10.242.238.88:<0.31368.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 729 is <0.31376.0> [ns_server:debug,2014-08-19T16:49:53.614,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,605705}, tap_estimate, {replica_building,"default",729,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21311.0>, <<"replication_building_729_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.626,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,617382}, tap_estimate, {replica_building,"default",729,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20645.0>, <<"replication_building_729_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.626,ns_1@10.242.238.88:<0.31377.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20645.0>}, {'ns_1@10.242.238.91',<18126.21311.0>}]) [rebalance:info,2014-08-19T16:49:53.627,ns_1@10.242.238.88:<0.31368.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:53.627,ns_1@10.242.238.88:<0.31368.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 729 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.628,ns_1@10.242.238.88:<0.31368.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.628,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:53.633,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.633,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31389.0>) [ns_server:debug,2014-08-19T16:49:53.633,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 473) [ns_server:debug,2014-08-19T16:49:53.633,ns_1@10.242.238.88:<0.31390.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.634,ns_1@10.242.238.88:<0.31390.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:53.634,ns_1@10.242.238.88:<0.31389.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 473 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.634,ns_1@10.242.238.88:<0.31395.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 473 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.634,ns_1@10.242.238.88:<0.31396.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 473 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.638,ns_1@10.242.238.88:<0.31397.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 473 into 'ns_1@10.242.238.91' is <18126.21331.0> [ns_server:debug,2014-08-19T16:49:53.641,ns_1@10.242.238.88:<0.31397.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 473 into 'ns_1@10.242.238.89' is <18124.26035.0> [rebalance:debug,2014-08-19T16:49:53.641,ns_1@10.242.238.88:<0.31389.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 473 is <0.31397.0> [ns_server:debug,2014-08-19T16:49:53.678,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,669504}, tap_estimate, {replica_building,"default",473,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21331.0>, <<"replication_building_473_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.694,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,685669}, tap_estimate, {replica_building,"default",473,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26035.0>, <<"replication_building_473_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:53.695,ns_1@10.242.238.88:<0.31398.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26035.0>}, {'ns_1@10.242.238.91',<18126.21331.0>}]) [rebalance:info,2014-08-19T16:49:53.695,ns_1@10.242.238.88:<0.31389.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:53.695,ns_1@10.242.238.88:<0.31389.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 473 on 
ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.696,ns_1@10.242.238.88:<0.31389.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.696,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:53.701,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:53.701,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31424.0>) [ns_server:debug,2014-08-19T16:49:53.701,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 983) [ns_server:debug,2014-08-19T16:49:53.702,ns_1@10.242.238.88:<0.31425.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.702,ns_1@10.242.238.88:<0.31425.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:53.702,ns_1@10.242.238.88:<0.31424.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 983 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.702,ns_1@10.242.238.88:<0.31430.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 983 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.702,ns_1@10.242.238.88:<0.31431.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 983 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.706,ns_1@10.242.238.88:<0.31432.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 983 into 'ns_1@10.242.238.90' is <18125.20665.0> [ns_server:debug,2014-08-19T16:49:53.709,ns_1@10.242.238.88:<0.31432.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 983 into 'ns_1@10.242.238.91' is <18126.21350.0> [rebalance:debug,2014-08-19T16:49:53.709,ns_1@10.242.238.88:<0.31424.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 983 is <0.31432.0> [ns_server:debug,2014-08-19T16:49:53.720,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 877. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.720,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/877. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.720,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",877,active,0} [ns_server:debug,2014-08-19T16:49:53.721,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,692,564,198,1003,926,798,432,304,977,666, 538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248,120, 976,848,482,354,899,716,588,222,950,886,822,456,392,328,264,1014,937,754,690, 626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536, 234,170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013, 936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910, 846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390, 326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492,428,364,300, 973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764, 700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218, 154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684,556,190, 918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500, 372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320, 993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290,963,652,524, 158] [ns_server:debug,2014-08-19T16:49:53.745,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,735999}, tap_estimate, {replica_building,"default",983,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20665.0>, <<"replication_building_983_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.763,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,754124}, tap_estimate, {replica_building,"default",983,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21350.0>, <<"replication_building_983_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.763,ns_1@10.242.238.88:<0.31433.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21350.0>}, {'ns_1@10.242.238.90',<18125.20665.0>}]) [rebalance:info,2014-08-19T16:49:53.763,ns_1@10.242.238.88:<0.31424.0>:janitor_agent:initiate_indexing:552]default: Doing 
initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:53.764,ns_1@10.242.238.88:<0.31424.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 983 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.764,ns_1@10.242.238.88:<0.31424.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.765,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:53.769,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.769,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.31445.0>) [ns_server:debug,2014-08-19T16:49:53.770,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 728) [ns_server:debug,2014-08-19T16:49:53.770,ns_1@10.242.238.88:<0.31446.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.770,ns_1@10.242.238.88:<0.31446.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:53.770,ns_1@10.242.238.88:<0.31445.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 728 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [views:debug,2014-08-19T16:49:53.770,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/877. 
Updated state: active (0) [rebalance:info,2014-08-19T16:49:53.770,ns_1@10.242.238.88:<0.31451.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 728 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:49:53.770,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",877,active,0} [rebalance:info,2014-08-19T16:49:53.770,ns_1@10.242.238.88:<0.31452.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 728 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.775,ns_1@10.242.238.88:<0.31453.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 728 into 'ns_1@10.242.238.91' is <18126.21356.0> [ns_server:debug,2014-08-19T16:49:53.777,ns_1@10.242.238.88:<0.31453.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 728 into 'ns_1@10.242.238.90' is <18125.20684.0> [rebalance:debug,2014-08-19T16:49:53.777,ns_1@10.242.238.88:<0.31445.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 728 is <0.31453.0> [ns_server:debug,2014-08-19T16:49:53.813,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,804086}, tap_estimate, {replica_building,"default",728,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21356.0>, <<"replication_building_728_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.831,ns_1@10.242.238.88:<0.31454.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20684.0>}, {'ns_1@10.242.238.91',<18126.21356.0>}]) [rebalance:info,2014-08-19T16:49:53.831,ns_1@10.242.238.88:<0.31445.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:53.832,ns_1@10.242.238.88:<0.31445.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 728 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.832,ns_1@10.242.238.88:<0.31445.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.833,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:53.834,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,821354}, tap_estimate, {replica_building,"default",728,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20684.0>, <<"replication_building_728_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.837,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.838,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31480.0>) [ns_server:debug,2014-08-19T16:49:53.838,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 472) 
[ns_server:debug,2014-08-19T16:49:53.838,ns_1@10.242.238.88:<0.31481.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.838,ns_1@10.242.238.88:<0.31481.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:53.838,ns_1@10.242.238.88:<0.31480.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 472 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.839,ns_1@10.242.238.88:<0.31486.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 472 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.839,ns_1@10.242.238.88:<0.31487.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 472 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.842,ns_1@10.242.238.88:<0.31488.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 472 into 'ns_1@10.242.238.91' is <18126.21375.0> [ns_server:debug,2014-08-19T16:49:53.844,ns_1@10.242.238.88:<0.31488.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 472 into 'ns_1@10.242.238.89' is <18124.26069.0> [rebalance:debug,2014-08-19T16:49:53.844,ns_1@10.242.238.88:<0.31480.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 472 is <0.31488.0> [ns_server:debug,2014-08-19T16:49:53.871,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 875. Nacking mccouch update. [views:debug,2014-08-19T16:49:53.871,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/875. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.871,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",875,active,0} [ns_server:debug,2014-08-19T16:49:53.872,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,886,822,456,392,328,264,1014,937,754, 690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600, 536,234,170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144, 1013,936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118, 974,910,846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884,820, 454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492,428, 364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947, 883,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738, 674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584, 520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920,792, 426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919, 736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994, 866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290,963, 652,524,158] [ns_server:debug,2014-08-19T16:49:53.880,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,871963}, tap_estimate, {replica_building,"default",472,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21375.0>, <<"replication_building_472_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.898,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,889095}, tap_estimate, {replica_building,"default",472,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26069.0>, <<"replication_building_472_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:53.898,ns_1@10.242.238.88:<0.31489.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26069.0>}, {'ns_1@10.242.238.91',<18126.21375.0>}]) [rebalance:info,2014-08-19T16:49:53.898,ns_1@10.242.238.88:<0.31480.0>:janitor_agent:initiate_indexing:552]default: 
Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:53.899,ns_1@10.242.238.88:<0.31480.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 472 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.899,ns_1@10.242.238.88:<0.31480.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.900,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:53.904,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:53.904,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31501.0>) [ns_server:debug,2014-08-19T16:49:53.905,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 982) [ns_server:debug,2014-08-19T16:49:53.905,ns_1@10.242.238.88:<0.31502.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.905,ns_1@10.242.238.88:<0.31502.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:53.905,ns_1@10.242.238.88:<0.31501.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 982 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.905,ns_1@10.242.238.88:<0.31507.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 982 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.906,ns_1@10.242.238.88:<0.31508.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 982 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.910,ns_1@10.242.238.88:<0.31509.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 982 into 'ns_1@10.242.238.90' is <18125.20704.0> [ns_server:debug,2014-08-19T16:49:53.912,ns_1@10.242.238.88:<0.31509.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 982 into 'ns_1@10.242.238.91' is <18126.21380.0> [rebalance:debug,2014-08-19T16:49:53.912,ns_1@10.242.238.88:<0.31501.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 982 is <0.31509.0> [views:debug,2014-08-19T16:49:53.930,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/875. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:53.930,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",875,active,0} [ns_server:debug,2014-08-19T16:49:53.948,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,939271}, tap_estimate, {replica_building,"default",982,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20704.0>, <<"replication_building_982_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:53.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452593,956668}, tap_estimate, {replica_building,"default",982,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21380.0>, <<"replication_building_982_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:53.966,ns_1@10.242.238.88:<0.31510.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21380.0>}, {'ns_1@10.242.238.90',<18125.20704.0>}]) [rebalance:info,2014-08-19T16:49:53.966,ns_1@10.242.238.88:<0.31501.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:53.966,ns_1@10.242.238.88:<0.31501.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 982 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:53.967,ns_1@10.242.238.88:<0.31501.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:53.968,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:53.972,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:53.972,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.31522.0>) [ns_server:debug,2014-08-19T16:49:53.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 727) [ns_server:debug,2014-08-19T16:49:53.972,ns_1@10.242.238.88:<0.31523.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:53.973,ns_1@10.242.238.88:<0.31523.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:53.973,ns_1@10.242.238.88:<0.31522.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 727 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:53.973,ns_1@10.242.238.88:<0.31528.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 727 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:53.973,ns_1@10.242.238.88:<0.31529.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 727 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:53.977,ns_1@10.242.238.88:<0.31530.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 727 into 'ns_1@10.242.238.91' is <18126.21400.0> [ns_server:debug,2014-08-19T16:49:53.979,ns_1@10.242.238.88:<0.31530.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 727 into 'ns_1@10.242.238.90' is <18125.20709.0> [rebalance:debug,2014-08-19T16:49:53.980,ns_1@10.242.238.88:<0.31522.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 727 is <0.31530.0> [ns_server:debug,2014-08-19T16:49:54.014,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,5845}, tap_estimate, {replica_building,"default",727,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21400.0>, <<"replication_building_727_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:54.021,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 873. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.021,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/873. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.022,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",873,active,0} [ns_server:debug,2014-08-19T16:49:54.023,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,886,822,456,392,328,264,1014,937,873, 754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664, 600,536,234,170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182, 118,974,910,846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884, 820,454,390,326,262,1012,999,935,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402,338,274, 947,883,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921, 738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,712,648, 584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192,920, 792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374, 919,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995, 684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290, 963,652,524,158] [ns_server:debug,2014-08-19T16:49:54.034,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,25045}, tap_estimate, {replica_building,"default",727,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20709.0>, <<"replication_building_727_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:54.034,ns_1@10.242.238.88:<0.31531.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20709.0>}, {'ns_1@10.242.238.91',<18126.21400.0>}]) [rebalance:info,2014-08-19T16:49:54.034,ns_1@10.242.238.88:<0.31522.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:54.035,ns_1@10.242.238.88:<0.31522.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 727 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:49:54.035,ns_1@10.242.238.88:<0.31522.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:54.036,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:54.040,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:54.040,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31557.0>) [ns_server:debug,2014-08-19T16:49:54.041,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 471) [ns_server:debug,2014-08-19T16:49:54.041,ns_1@10.242.238.88:<0.31558.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:54.041,ns_1@10.242.238.88:<0.31558.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:54.041,ns_1@10.242.238.88:<0.31557.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 471 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:54.041,ns_1@10.242.238.88:<0.31563.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 471 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:54.041,ns_1@10.242.238.88:<0.31564.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 471 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:54.046,ns_1@10.242.238.88:<0.31565.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 471 into 'ns_1@10.242.238.91' is <18126.21405.0> [ns_server:debug,2014-08-19T16:49:54.048,ns_1@10.242.238.88:<0.31565.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 471 into 'ns_1@10.242.238.89' is <18124.26098.0> [rebalance:debug,2014-08-19T16:49:54.048,ns_1@10.242.238.88:<0.31557.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 471 is <0.31565.0> [views:debug,2014-08-19T16:49:54.055,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/873. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.055,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",873,active,0} [ns_server:debug,2014-08-19T16:49:54.084,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,75191}, tap_estimate, {replica_building,"default",471,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21405.0>, <<"replication_building_471_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:54.107,ns_1@10.242.238.88:<0.31566.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26098.0>}, {'ns_1@10.242.238.91',<18126.21405.0>}]) [rebalance:info,2014-08-19T16:49:54.108,ns_1@10.242.238.88:<0.31557.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:54.108,ns_1@10.242.238.88:<0.31557.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 471 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:49:54.108,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,98407}, tap_estimate, {replica_building,"default",471,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26098.0>, <<"replication_building_471_'ns_1@10.242.238.89'">>} [rebalance:info,2014-08-19T16:49:54.109,ns_1@10.242.238.88:<0.31557.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:54.109,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:54.113,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:54.114,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31578.0>) [ns_server:debug,2014-08-19T16:49:54.114,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 981) [ns_server:debug,2014-08-19T16:49:54.114,ns_1@10.242.238.88:<0.31579.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:54.114,ns_1@10.242.238.88:<0.31579.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:54.114,ns_1@10.242.238.88:<0.31578.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 981 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:54.115,ns_1@10.242.238.88:<0.31584.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 981 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:54.115,ns_1@10.242.238.88:<0.31585.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 981 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:54.121,ns_1@10.242.238.88:<0.31586.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 981 into 'ns_1@10.242.238.90' is <18125.20729.0> [ns_server:debug,2014-08-19T16:49:54.122,ns_1@10.242.238.88:<0.31586.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 981 into 'ns_1@10.242.238.91' is <18126.21410.0> [rebalance:debug,2014-08-19T16:49:54.123,ns_1@10.242.238.88:<0.31578.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 981 is <0.31586.0> [ns_server:debug,2014-08-19T16:49:54.158,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,149904}, tap_estimate, {replica_building,"default",981,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20729.0>, <<"replication_building_981_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:54.176,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,167159}, tap_estimate, {replica_building,"default",981,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21410.0>, <<"replication_building_981_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:54.176,ns_1@10.242.238.88:<0.31587.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21410.0>}, {'ns_1@10.242.238.90',<18125.20729.0>}]) [rebalance:info,2014-08-19T16:49:54.176,ns_1@10.242.238.88:<0.31578.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:54.177,ns_1@10.242.238.88:<0.31578.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 981 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:54.177,ns_1@10.242.238.88:<0.31578.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:54.178,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:54.182,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:54.183,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.31613.0>) 
[ns_server:debug,2014-08-19T16:49:54.183,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 726) [ns_server:debug,2014-08-19T16:49:54.183,ns_1@10.242.238.88:<0.31614.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:54.183,ns_1@10.242.238.88:<0.31614.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:49:54.183,ns_1@10.242.238.88:<0.31613.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 726 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:54.184,ns_1@10.242.238.88:<0.31619.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 726 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:54.184,ns_1@10.242.238.88:<0.31620.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 726 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:54.188,ns_1@10.242.238.88:<0.31621.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 726 into 'ns_1@10.242.238.91' is <18126.21430.0> [ns_server:debug,2014-08-19T16:49:54.189,ns_1@10.242.238.88:<0.31621.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 726 into 'ns_1@10.242.238.90' is <18125.20734.0> [rebalance:debug,2014-08-19T16:49:54.190,ns_1@10.242.238.88:<0.31613.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 726 is <0.31621.0> [ns_server:debug,2014-08-19T16:49:54.225,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,216275}, tap_estimate, {replica_building,"default",726,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21430.0>, <<"replication_building_726_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:54.242,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,233911}, tap_estimate, {replica_building,"default",726,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20734.0>, <<"replication_building_726_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:54.243,ns_1@10.242.238.88:<0.31622.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.20734.0>}, {'ns_1@10.242.238.91',<18126.21430.0>}]) [rebalance:info,2014-08-19T16:49:54.243,ns_1@10.242.238.88:<0.31613.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:49:54.244,ns_1@10.242.238.88:<0.31613.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 726 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:54.244,ns_1@10.242.238.88:<0.31613.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:54.245,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:49:54.248,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 871. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.248,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/871. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.248,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",871,active,0} [ns_server:debug,2014-08-19T16:49:54.249,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:49:54.249,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.31634.0>) [ns_server:debug,2014-08-19T16:49:54.249,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 470) [ns_server:debug,2014-08-19T16:49:54.250,ns_1@10.242.238.88:<0.31635.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:54.250,ns_1@10.242.238.88:<0.31635.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:49:54.250,ns_1@10.242.238.88:<0.31634.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 470 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:54.250,ns_1@10.242.238.88:<0.31640.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 470 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:54.250,ns_1@10.242.238.88:<0.31641.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 470 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:54.250,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,886,822,456,392,328,264,1014,937,873, 754,690,626,562,196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664, 600,536,234,170,962,898,834,770,468,404,340,276,949,885,766,702,638,574,208, 144,1013,936,872,808,506,442,378,314,1000,987,923,740,676,612,548,246,182, 118,974,910,846,782,480,416,352,288,961,897,714,650,586,522,220,156,948,884, 
820,454,390,326,262,1012,999,935,871,752,688,624,560,194,130,986,922,858,794, 492,428,364,300,973,909,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985, 921,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,712, 648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,686,558,192, 920,792,426,298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502, 374,919,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322, 995,684,556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632, 138,994,866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019, 942,814,448,320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268, 1018,941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889, 706,578,212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526, 160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418, 290,963,652,524,158] [ns_server:debug,2014-08-19T16:49:54.254,ns_1@10.242.238.88:<0.31642.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 470 into 'ns_1@10.242.238.91' is <18126.21435.0> [ns_server:debug,2014-08-19T16:49:54.256,ns_1@10.242.238.88:<0.31642.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 470 into 'ns_1@10.242.238.89' is <18124.26118.0> [rebalance:debug,2014-08-19T16:49:54.257,ns_1@10.242.238.88:<0.31634.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 470 is <0.31642.0> [views:debug,2014-08-19T16:49:54.282,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/871. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.282,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",871,active,0} [ns_server:debug,2014-08-19T16:49:54.293,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,284631}, tap_estimate, {replica_building,"default",470,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21435.0>, <<"replication_building_470_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:49:54.309,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,300624}, tap_estimate, {replica_building,"default",470,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26118.0>, <<"replication_building_470_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:49:54.310,ns_1@10.242.238.88:<0.31643.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26118.0>}, {'ns_1@10.242.238.91',<18126.21435.0>}]) [rebalance:info,2014-08-19T16:49:54.310,ns_1@10.242.238.88:<0.31634.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:49:54.310,ns_1@10.242.238.88:<0.31634.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 470 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:54.311,ns_1@10.242.238.88:<0.31634.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:54.311,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:49:54.316,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:49:54.316,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.31655.0>) [ns_server:debug,2014-08-19T16:49:54.316,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 980) [ns_server:debug,2014-08-19T16:49:54.316,ns_1@10.242.238.88:<0.31656.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:49:54.316,ns_1@10.242.238.88:<0.31656.0>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:49:54.317,ns_1@10.242.238.88:<0.31655.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 980 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:49:54.317,ns_1@10.242.238.88:<0.31661.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 980 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:49:54.317,ns_1@10.242.238.88:<0.31662.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 980 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:49:54.322,ns_1@10.242.238.88:<0.31669.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 980 into 'ns_1@10.242.238.90' is <18125.20754.0> [ns_server:debug,2014-08-19T16:49:54.324,ns_1@10.242.238.88:<0.31669.0>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 980 into 'ns_1@10.242.238.91' is <18126.21440.0> [rebalance:debug,2014-08-19T16:49:54.324,ns_1@10.242.238.88:<0.31655.0>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 980 is <0.31669.0> [ns_server:debug,2014-08-19T16:49:54.358,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 869. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.359,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/869. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.359,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",869,active,0} [ns_server:debug,2014-08-19T16:49:54.360,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,351398}, tap_estimate, {replica_building,"default",980,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.20754.0>, <<"replication_building_980_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:49:54.360,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,822,456,328,937,873,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910,846,782, 480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,999,935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700, 
636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,684,556,190, 918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500, 372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320, 993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290,963,652,524, 158,886,392,264,1014] [ns_server:debug,2014-08-19T16:49:54.380,ns_1@10.242.238.88:<0.31677.0>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.21440.0>}, {'ns_1@10.242.238.90',<18125.20754.0>}]) [rebalance:info,2014-08-19T16:49:54.380,ns_1@10.242.238.88:<0.31655.0>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:49:54.380,ns_1@10.242.238.88:<0.31655.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 980 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:54.381,ns_1@10.242.238.88:<0.31655.0>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:49:54.381,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:49:54.382,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:49:54.384,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452594,370817}, tap_estimate, {replica_building,"default",980,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.21440.0>, <<"replication_building_980_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:49:54.409,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/869. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.409,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",869,active,0} [ns_server:debug,2014-08-19T16:49:54.484,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 867. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.485,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/867. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.485,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",867,active,0} [ns_server:debug,2014-08-19T16:49:54.486,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,822,456,328,937,873,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910,846,782, 480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,999,935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290,963,652, 524,158,886,392,264,1014] [views:debug,2014-08-19T16:49:54.518,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/867. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.518,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",867,active,0} [ns_server:debug,2014-08-19T16:49:54.619,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 865. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.619,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/865. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.619,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",865,active,0} [ns_server:debug,2014-08-19T16:49:54.620,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,822,456,328,937,873,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910,846,782, 480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,999,935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290,963, 652,524,158,886,392,264,1014] [views:debug,2014-08-19T16:49:54.653,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/865. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.653,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",865,active,0} [ns_server:debug,2014-08-19T16:49:54.744,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 863. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.744,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/863. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.745,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",863,active,0} [ns_server:debug,2014-08-19T16:49:54.746,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,822,456,328,937,873,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910,846,782, 480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,999,935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,678,550,184,912,784,418,290, 963,652,524,158,886,392,264,1014] [views:debug,2014-08-19T16:49:54.778,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/863. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.779,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",863,active,0} [ns_server:debug,2014-08-19T16:49:54.939,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 861. Nacking mccouch update. [views:debug,2014-08-19T16:49:54.940,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/861. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:54.940,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",861,active,0} [ns_server:debug,2014-08-19T16:49:54.941,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,822,456,328,937,873,754,690,626,562, 196,132,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170, 962,898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872, 808,506,442,378,314,1000,987,923,740,676,612,548,246,182,118,974,910,846,782, 480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,999,935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,652,524,158,886,392,264,1014] [views:debug,2014-08-19T16:49:55.023,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/861. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.024,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",861,active,0} [ns_server:debug,2014-08-19T16:49:55.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 859. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.199,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/859. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",859,active,0} [ns_server:debug,2014-08-19T16:49:55.200,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,822,456,328,873,690,562,196,1001,988, 924,860,796,494,430,366,302,975,911,728,664,600,536,234,170,962,898,834,770, 468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352, 288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935, 871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206, 142,1011,998,934,870,806,504,440,376,312,985,921,738,674,610,546,244,180,116, 972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023,946,882, 818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,660,532,166, 894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630,136,992, 864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812, 446,318,991,863,680,552,186,914,786,420,292,965,654,526,160,888,394,266,1016, 939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290,963,652,524, 158,886,392,264,1014,937,754,626,132] [views:debug,2014-08-19T16:49:55.274,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/859. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.274,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",859,active,0} [ns_server:debug,2014-08-19T16:49:55.416,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 857. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.416,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/857. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.417,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",857,active,0} [ns_server:debug,2014-08-19T16:49:55.418,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,672,544,178,906,778,412,284,957,646,518,152,880, 386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828,462, 334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282,955, 644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174,902, 774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850,484, 356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977, 666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614,248, 120,976,848,482,354,899,716,588,222,950,822,456,328,873,690,562,196,1001,988, 924,860,796,494,430,366,302,975,911,728,664,600,536,234,170,962,898,834,770, 468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352, 288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935, 871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206, 142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180, 116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023,946, 882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,660,532, 166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790, 424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372,917, 734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630,136, 992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290,963,652, 524,158,886,392,264,1014,937,754,626,132] [views:debug,2014-08-19T16:49:55.493,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/857. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.493,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",857,active,0} [ns_server:debug,2014-08-19T16:49:55.592,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 855. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.592,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/855. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.592,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",855,active,0} [ns_server:debug,2014-08-19T16:49:55.593,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,670,542,176,904,776,410,282, 955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903,720, 592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540,174, 902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978,850, 484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304, 977,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742,614, 248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690,562,196,1001, 988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416, 352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999, 935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,726, 662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572, 206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244, 180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023, 946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,660, 532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918, 790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290,963, 652,524,158,886,392,264,1014,937,754,626,132] [views:debug,2014-08-19T16:49:55.651,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/855. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.652,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",855,active,0} [ns_server:debug,2014-08-19T16:49:55.761,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 853. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.762,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/853. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.762,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",853,active,0} [ns_server:debug,2014-08-19T16:49:55.763,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,668,540, 174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122,978, 850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432, 304,977,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925,742, 614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690,562,196, 1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170,962,898, 834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506, 442,378,314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480, 416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012, 999,935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909, 726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636, 572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,652,524,158,886,392,264,1014,937,754,626,132] [views:debug,2014-08-19T16:49:55.796,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/853. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.796,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",853,active,0} [ns_server:debug,2014-08-19T16:49:55.896,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 851. Nacking mccouch update. [views:debug,2014-08-19T16:49:55.896,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/851. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.896,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",851,active,0} [ns_server:debug,2014-08-19T16:49:55.897,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002,925, 742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690,562, 196,1001,988,924,860,796,494,430,366,302,975,911,728,664,600,536,234,170,962, 898,834,770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808, 506,442,378,314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782, 480,416,352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262, 1012,999,935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700, 636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218, 154,1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426, 298,971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736, 608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994, 866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 448,320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526, 160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,652,524,158,886,392,264,1014,937,754,626,132] [views:debug,2014-08-19T16:49:55.929,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/851. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:55.930,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",851,active,0} [ns_server:debug,2014-08-19T16:49:56.005,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 849. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.005,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/849. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.005,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",849,active,0} [ns_server:debug,2014-08-19T16:49:56.006,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,911,728,664,600,536,234,170,962,898,834,770, 468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378, 314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352, 288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935, 871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206, 142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180, 116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023,946, 882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,660,532, 166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790, 424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372,917, 734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865, 682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630,136, 992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160,888,394,266, 1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290,963,652, 524,158,886,392,264,1014,937,754,626,132,988,860,494,366] [views:debug,2014-08-19T16:49:56.038,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/849. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.039,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",849,active,0} [ns_server:debug,2014-08-19T16:49:56.114,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 847. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.114,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/847. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.114,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",847,active,0} [ns_server:debug,2014-08-19T16:49:56.115,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,911,847,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416, 352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999, 935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,726, 662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572, 206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244, 180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023, 946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,660, 532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918, 790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290,963, 652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366] [views:debug,2014-08-19T16:49:56.148,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/847. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.148,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",847,active,0} [ns_server:debug,2014-08-19T16:49:56.223,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 845. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.223,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/845. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.223,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",845,active,0} [ns_server:debug,2014-08-19T16:49:56.224,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,911,847,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416, 352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999, 935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845, 726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636, 572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556, 190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366] [views:debug,2014-08-19T16:49:56.268,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/845. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.268,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",845,active,0} [ns_server:debug,2014-08-19T16:49:56.451,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 843. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.452,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/843. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.452,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",843,active,0} [ns_server:debug,2014-08-19T16:49:56.453,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,911,847,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416, 352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999, 935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845, 726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636, 572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,843,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736, 608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684, 556,190,918,790,424,296,969,658,530,164,892,398,270,1020,943,760,632,138,994, 866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 448,320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526, 160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366] [views:debug,2014-08-19T16:49:56.570,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/843. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.570,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",843,active,0} [ns_server:debug,2014-08-19T16:49:56.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 841. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.754,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/841. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.754,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",841,active,0} [ns_server:debug,2014-08-19T16:49:56.755,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,911,847,728,664,600,536,234,170,962,898,834, 770,468,404,340,276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442, 378,314,1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416, 352,288,961,897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999, 935,871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845, 726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636, 572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546, 244,180,116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,843,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736, 608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684, 556,190,918,790,424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,448,320,993,865,682,554,188,916,788,422,294,967,656,528,162,890,396,268, 1018,941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889, 706,578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654, 526,160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366] [views:debug,2014-08-19T16:49:56.829,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/841. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.829,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",841,active,0} [ns_server:debug,2014-08-19T16:49:56.938,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 839. Nacking mccouch update. [views:debug,2014-08-19T16:49:56.938,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/839. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.938,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",839,active,0} [ns_server:debug,2014-08-19T16:49:56.939,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,847,664,536,170,962,898,834,770,468,404,340, 276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206,142, 1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116, 972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023,946,882, 818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532, 166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790, 424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290, 963,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911,728,600, 234] [views:debug,2014-08-19T16:49:56.972,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/839. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:56.972,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",839,active,0} [ns_server:debug,2014-08-19T16:49:57.047,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 837. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.047,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/837. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.047,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",837,active,0} [ns_server:debug,2014-08-19T16:49:57.048,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,847,664,536,170,962,898,834,770,468,404,340, 276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206,142, 1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116, 972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023,946,882, 818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532, 166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790, 424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911,728, 600,234] [views:debug,2014-08-19T16:49:57.081,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/837. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.081,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",837,active,0} [ns_server:debug,2014-08-19T16:49:57.223,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 835. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.223,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/835. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.224,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",835,active,0} [ns_server:debug,2014-08-19T16:49:57.225,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,847,664,536,170,962,898,834,770,468,404,340, 276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 897,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206,142, 1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116, 972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023,946,882, 818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532, 166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790, 424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911, 728,600,234] [views:debug,2014-08-19T16:49:57.308,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/835. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.308,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",835,active,0} [ns_server:debug,2014-08-19T16:49:57.482,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 833. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.483,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/833. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.483,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",833,active,0} [ns_server:debug,2014-08-19T16:49:57.484,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,847,664,536,170,962,898,834,770,468,404,340, 276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 897,833,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871, 752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206, 142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180, 116,972,908,844,780,478,414,350,286,959,895,712,648,584,520,218,154,1023,946, 882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660, 532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918, 790,424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500, 372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320, 993,865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526, 160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366, 911,728,600,234] [views:debug,2014-08-19T16:49:57.542,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/833. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.542,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",833,active,0} [ns_server:debug,2014-08-19T16:49:57.717,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 831. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.717,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/831. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.717,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",831,active,0} [ns_server:debug,2014-08-19T16:49:57.718,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,646,518,152, 880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956,828, 462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776,410, 282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358,903, 720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851,668, 540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250,122, 978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380,1002, 925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873,690, 562,196,1001,924,796,430,302,975,847,664,536,170,962,898,834,770,468,404,340, 276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 897,833,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871, 752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206, 142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180, 116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023, 946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843, 660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242, 114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190, 918,790,424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654, 526,160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494, 366,911,728,600,234] [views:debug,2014-08-19T16:49:57.802,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/831. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.802,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",831,active,0} [ns_server:debug,2014-08-19T16:49:57.952,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 829. Nacking mccouch update. [views:debug,2014-08-19T16:49:57.952,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/829. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:57.952,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",829,active,0} [ns_server:debug,2014-08-19T16:49:57.953,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486,358, 903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979,851, 668,540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616,250, 122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508,380, 1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328,873, 690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404,276,949,885, 766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923,859, 740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,897,833,714, 650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752,688,624, 560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232, 168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206,142,1011,998, 934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908, 844,780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818, 452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166, 894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372,917, 734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865, 682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290, 963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911,728, 600,234,962,834,468,340] [ns_server:debug,2014-08-19T16:49:57.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_980_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_980_'ns_1@10.242.238.90'">>}]}, {move_state,470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_470_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_470_'ns_1@10.242.238.91'">>}]}, {move_state,726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_726_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_726_'ns_1@10.242.238.91'">>}]}, {move_state,981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_981_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_981_'ns_1@10.242.238.90'">>}]}, {move_state,471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_471_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_471_'ns_1@10.242.238.91'">>}]}, {move_state,727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_727_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_727_'ns_1@10.242.238.91'">>}]}, {move_state,982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_982_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_982_'ns_1@10.242.238.90'">>}]}, {move_state,472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_472_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_472_'ns_1@10.242.238.91'">>}]}, {move_state,728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_728_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_728_'ns_1@10.242.238.91'">>}]}, {move_state,983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_983_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_983_'ns_1@10.242.238.90'">>}]}, {move_state,473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_473_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_473_'ns_1@10.242.238.91'">>}]}, {move_state,729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_729_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_729_'ns_1@10.242.238.91'">>}]}, {move_state,984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_984_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_984_'ns_1@10.242.238.90'">>}]}, {move_state,474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_474_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_474_'ns_1@10.242.238.91'">>}]}, {move_state,730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_730_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_730_'ns_1@10.242.238.91'">>}]}, {move_state,985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_985_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_985_'ns_1@10.242.238.90'">>}]}, {move_state,475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_475_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_475_'ns_1@10.242.238.91'">>}]}, {move_state,731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_731_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_731_'ns_1@10.242.238.91'">>}]}, {move_state,986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_986_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_986_'ns_1@10.242.238.90'">>}]}, {move_state,476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_476_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_476_'ns_1@10.242.238.91'">>}]}, {move_state,732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_732_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_732_'ns_1@10.242.238.91'">>}]}, {move_state,987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_987_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_987_'ns_1@10.242.238.90'">>}]}, {move_state,477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_477_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_477_'ns_1@10.242.238.91'">>}]}, {move_state,733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_733_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_733_'ns_1@10.242.238.91'">>}]}, {move_state,988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_988_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_988_'ns_1@10.242.238.90'">>}]}, {move_state,478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_478_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_478_'ns_1@10.242.238.91'">>}]}, {move_state,734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_734_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_734_'ns_1@10.242.238.91'">>}]}, {move_state,989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_989_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_989_'ns_1@10.242.238.90'">>}]}, {move_state,479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_479_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_479_'ns_1@10.242.238.91'">>}]}, {move_state,735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_735_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_735_'ns_1@10.242.238.91'">>}]}, {move_state,990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_990_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_990_'ns_1@10.242.238.90'">>}]}, {move_state,480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_480_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_480_'ns_1@10.242.238.91'">>}]}, {move_state,736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_736_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_736_'ns_1@10.242.238.91'">>}]}, {move_state,991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_991_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_991_'ns_1@10.242.238.90'">>}]}, {move_state,481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_481_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_481_'ns_1@10.242.238.91'">>}]}, {move_state,737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_737_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_737_'ns_1@10.242.238.91'">>}]}, {move_state,992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_992_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_992_'ns_1@10.242.238.90'">>}]}, {move_state,482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_482_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_482_'ns_1@10.242.238.91'">>}]}, {move_state,738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_738_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_738_'ns_1@10.242.238.91'">>}]}, {move_state,993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_993_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_993_'ns_1@10.242.238.90'">>}]}, {move_state,483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_483_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_483_'ns_1@10.242.238.91'">>}]}, {move_state,739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_739_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_739_'ns_1@10.242.238.91'">>}]}, {move_state,994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_994_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_994_'ns_1@10.242.238.90'">>}]}, {move_state,484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_484_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_484_'ns_1@10.242.238.91'">>}]}, {move_state,740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_740_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_740_'ns_1@10.242.238.91'">>}]}, {move_state,995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_995_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_995_'ns_1@10.242.238.90'">>}]}, {move_state,485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_485_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_485_'ns_1@10.242.238.91'">>}]}, {move_state,741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_741_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_741_'ns_1@10.242.238.91'">>}]}, {move_state,996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_996_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_996_'ns_1@10.242.238.90'">>}]}, {move_state,486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_486_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_486_'ns_1@10.242.238.91'">>}]}, {move_state,742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_742_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_742_'ns_1@10.242.238.91'">>}]}, {move_state,997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_997_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_997_'ns_1@10.242.238.90'">>}]}, {move_state,487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_487_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_487_'ns_1@10.242.238.91'">>}]}, {move_state,743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_743_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_743_'ns_1@10.242.238.91'">>}]}, {move_state,998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_998_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_998_'ns_1@10.242.238.90'">>}]}, {move_state,488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_488_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_488_'ns_1@10.242.238.91'">>}]}, {move_state,744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_744_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_744_'ns_1@10.242.238.91'">>}]}, {move_state,999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_999_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_999_'ns_1@10.242.238.90'">>}]}, {move_state,489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_489_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_489_'ns_1@10.242.238.91'">>}]}, {move_state,745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_745_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_745_'ns_1@10.242.238.91'">>}]}, {move_state,1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1000_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1000_'ns_1@10.242.238.90'">>}]}, {move_state,490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_490_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_490_'ns_1@10.242.238.91'">>}]}, {move_state,746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_746_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_746_'ns_1@10.242.238.91'">>}]}, {move_state,1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1001_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1001_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:49:57.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 980, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 470, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 726, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 981, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 471, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 727, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 982, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 472, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 728, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 983, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 473, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 729, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 984, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 474, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 730, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 985, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 475, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 731, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 986, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 476, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 732, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 987, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 477, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 733, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 988, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 478, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 734, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 989, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 479, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 735, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 990, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:49:57.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 480, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 736, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 991, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 481, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 737, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 992, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 482, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 738, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 993, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 483, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 739, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 994, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 484, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 740, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 995, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 485, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 741, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 996, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 486, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:49:57.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 742, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 997, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 487, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 743, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 998, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 488, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 744, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 999, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 489, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 745, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1000, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:49:57.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 490, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 746, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:49:57.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1001, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [views:debug,2014-08-19T16:49:58.011,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/829. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.011,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",829,active,0} [ns_server:debug,2014-08-19T16:49:58.161,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 827. Nacking mccouch update. [views:debug,2014-08-19T16:49:58.161,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/827. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.161,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",827,active,0} [ns_server:debug,2014-08-19T16:49:58.162,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,642,514,148,876,510,382,1004,927,744,616, 250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874,508, 380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456,328, 873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404,276,949, 885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987,923, 859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,897,833, 714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752,688, 624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534, 232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206,142,1011, 998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972, 908,844,780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882, 818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532, 166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790, 424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911, 728,600,234,962,834,468,340] [views:debug,2014-08-19T16:49:58.240,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/827. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.240,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",827,active,0} [ns_server:debug,2014-08-19T16:49:58.323,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 825. Nacking mccouch update. [views:debug,2014-08-19T16:49:58.323,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/825. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.323,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",825,active,0} [ns_server:debug,2014-08-19T16:49:58.325,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,640,512,146,874, 508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822,456, 328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404,276, 949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000,987, 923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961,897, 833,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752, 688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662,598, 534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206,142, 1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116, 972,908,844,780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946, 882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660, 532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918, 790,424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500, 372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320, 993,865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526, 160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340] [views:debug,2014-08-19T16:49:58.382,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/825. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.382,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",825,active,0} [ns_server:debug,2014-08-19T16:49:58.507,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 823. Nacking mccouch update. [views:debug,2014-08-19T16:49:58.507,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/823. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.507,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",823,active,0} [ns_server:debug,2014-08-19T16:49:58.508,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,885,766,702,638,574,208,144,1013,936,872,808,506,442,378,314,1000, 987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288,961, 897,833,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935,871, 752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572,206, 142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244,180, 116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023, 946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843, 660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242, 114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190, 918,790,424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866, 500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448, 320,993,865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018, 941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654, 526,160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494, 366,911,728,600,234,962,834,468,340] [views:debug,2014-08-19T16:49:58.575,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/823. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.575,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",823,active,0} [ns_server:debug,2014-08-19T16:49:58.700,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 821. Nacking mccouch update. [views:debug,2014-08-19T16:49:58.700,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/821. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.700,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",821,active,0} [ns_server:debug,2014-08-19T16:49:58.701,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,885,821,766,702,638,574,208,144,1013,936,872,808,506,442,378,314, 1000,987,923,859,740,676,612,548,246,182,118,974,910,846,782,480,416,352,288, 961,897,833,714,650,586,522,220,156,948,884,820,454,390,326,262,1012,999,935, 871,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973,909,845,726, 662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,764,700,636,572, 206,142,1011,998,934,870,806,504,440,376,312,985,921,857,738,674,610,546,244, 180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520,218,154, 1023,946,882,818,452,388,324,260,1010,997,869,686,558,192,920,792,426,298, 971,843,660,532,166,894,400,272,1022,945,762,634,140,996,868,502,374,919,736, 608,242,114,970,842,476,348,893,710,582,216,1021,944,816,450,322,995,867,684, 556,190,918,790,424,296,969,841,658,530,164,892,398,270,1020,943,760,632,138, 994,866,500,372,917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,448,320,993,865,682,554,188,916,788,422,294,967,839,656,528,162,890,396, 268,1018,941,758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344, 889,706,578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965, 837,654,526,160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602, 236,108,964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550, 184,912,784,418,290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988, 860,494,366,911,728,600,234,962,834,468,340] [views:debug,2014-08-19T16:49:58.768,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/821. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.768,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",821,active,0} [ns_server:debug,2014-08-19T16:49:58.892,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 819. Nacking mccouch update. [views:debug,2014-08-19T16:49:58.892,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/819. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.892,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",819,active,0} [ns_server:debug,2014-08-19T16:49:58.894,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,936,872,808,506,442,378,314,1000,987,923,859,740,676, 612,548,246,182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586, 522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934, 870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844, 780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452, 388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894, 400,272,1022,945,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296, 969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758,630,136, 992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394, 266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290,963, 835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911,728,600, 234,962,834,468,340,885,702,574,208,1013] [rebalance:info,2014-08-19T16:49:58.939,ns_1@10.242.238.88:<0.30003.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 490 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:49:58.939,ns_1@10.242.238.88:<0.30206.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 743 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:49:58.941,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 490 state to active [rebalance:info,2014-08-19T16:49:58.942,ns_1@10.242.238.88:<0.30003.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 490 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:49:58.942,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 743 state to active 
[rebalance:info,2014-08-19T16:49:58.943,ns_1@10.242.238.88:<0.30206.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 743 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:49:58.943,ns_1@10.242.238.88:<0.30003.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:49:58.943,ns_1@10.242.238.88:<0.30206.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:49:58.951,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/819. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:58.951,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",819,active,0} [ns_server:debug,2014-08-19T16:49:59.095,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 817. Nacking mccouch update. [views:debug,2014-08-19T16:49:59.095,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/817. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.095,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",817,active,0} [ns_server:debug,2014-08-19T16:49:59.096,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,936,872,808,506,442,378,314,1000,987,923,859,740,676, 612,548,246,182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586, 522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934, 870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844, 780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452, 388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894, 400,272,1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,760,632,138,994,866,500,372,917, 734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865, 682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758,630, 
136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290, 963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911,728, 600,234,962,834,468,340,885,702,574,208,1013] [views:debug,2014-08-19T16:49:59.146,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/817. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",817,active,0} [ns_server:info,2014-08-19T16:49:59.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:49:59.312,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 815. Nacking mccouch update. [views:debug,2014-08-19T16:49:59.312,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/815. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.313,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",815,active,0} [ns_server:debug,2014-08-19T16:49:59.314,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,936,872,808,506,442,378,314,1000,987,923,859,740,676, 612,548,246,182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586, 522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934, 870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844, 780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452, 388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894, 400,272,1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,758, 
630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366,911, 728,600,234,962,834,468,340,885,702,574,208,1013] [views:debug,2014-08-19T16:49:59.371,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/815. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.372,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",815,active,0} [ns_server:debug,2014-08-19T16:49:59.546,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 813. Nacking mccouch update. [views:debug,2014-08-19T16:49:59.547,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/813. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",813,active,0} [ns_server:debug,2014-08-19T16:49:59.548,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,936,872,808,506,442,378,314,1000,987,923,859,740,676, 612,548,246,182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586, 522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934, 870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844, 780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452, 388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894, 400,272,1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526, 
160,888,394,266,1016,939,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013] [views:debug,2014-08-19T16:49:59.606,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/813. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.606,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",813,active,0} [ns_server:debug,2014-08-19T16:49:59.781,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 811. Nacking mccouch update. [views:debug,2014-08-19T16:49:59.781,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/811. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.781,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",811,active,0} [ns_server:debug,2014-08-19T16:49:59.782,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,936,872,808,506,442,378,314,1000,987,923,859,740,676, 612,548,246,182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586, 522,220,156,948,884,820,454,390,326,262,1012,999,935,871,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934, 870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844, 780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452, 388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894, 400,272,1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526, 160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 
784,418,290,963,835,652,524,158,886,392,264,1014,937,754,626,132,988,860,494, 366,911,728,600,234,962,834,468,340,885,702,574,208,1013] [views:debug,2014-08-19T16:49:59.840,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/811. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.840,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",811,active,0} [ns_server:debug,2014-08-19T16:49:59.959,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 809. Nacking mccouch update. [views:debug,2014-08-19T16:49:59.959,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/809. Updated state: active (0) [ns_server:debug,2014-08-19T16:49:59.959,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",809,active,0} [ns_server:debug,2014-08-19T16:49:59.961,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,872,506,378,1000,987,923,859,740,676,612,548,246,182, 118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156,948, 884,820,454,390,326,262,1012,999,935,871,752,688,624,560,194,130,986,922,858, 794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,921,857,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969, 841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314] 
[views:debug,2014-08-19T16:50:00.011,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/809. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.011,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",809,active,0} [ns_server:debug,2014-08-19T16:50:00.111,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 807. Nacking mccouch update. [views:debug,2014-08-19T16:50:00.111,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/807. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",807,active,0} [ns_server:debug,2014-08-19T16:50:00.113,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646,518, 152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228,956, 828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904,776, 410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852,486, 358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306,979, 851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927,744, 616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512,146, 874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950,822, 456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,872,506,378,1000,987,923,859,740,676,612,548,246,182, 118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156,948, 884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922, 858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768, 466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844,780,478,414, 350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969, 841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314] [views:debug,2014-08-19T16:50:00.170,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket 
event for default/807. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",807,active,0} [ns_server:debug,2014-08-19T16:50:00.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 805. Nacking mccouch update. [views:debug,2014-08-19T16:50:00.245,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/805. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.246,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",805,active,0} [ns_server:debug,2014-08-19T16:50:00.247,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,748,620,254,126,982,854,488,360,905,722,594,228, 956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176,904, 776,410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980,852, 486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434,306, 979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004,927, 744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692,564, 198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640,512, 146,874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222,950, 822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898,770, 404,276,949,821,766,638,144,872,506,378,1000,987,923,859,740,676,612,548,246, 182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156, 948,884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844,780,478, 414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324, 260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969, 841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314] [views:debug,2014-08-19T16:50:00.279,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/805. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",805,active,0} [ns_server:debug,2014-08-19T16:50:00.354,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 803. Nacking mccouch update. [views:debug,2014-08-19T16:50:00.354,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/803. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.355,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",803,active,0} [ns_server:debug,2014-08-19T16:50:00.356,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,746,618,252,124,980, 852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800,434, 306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382,1004, 927,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875,692, 564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823,640, 512,146,874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588,222, 950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170,898, 770,404,276,949,821,766,638,144,872,506,378,1000,987,923,859,740,676,612,548, 246,182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220, 156,948,884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130, 986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896, 832,768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870, 806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388, 324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400, 272,1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296, 969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917, 734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865, 682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314] [views:debug,2014-08-19T16:50:00.388,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/803. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.389,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",803,active,0} [ns_server:debug,2014-08-19T16:50:00.480,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 801. Nacking mccouch update. [views:debug,2014-08-19T16:50:00.480,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/801. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.480,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",801,active,0} [ns_server:debug,2014-08-19T16:50:00.481,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330,875, 692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951,823, 640,512,146,874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716,588, 222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536,170, 898,770,404,276,949,821,766,638,144,872,506,378,1000,987,923,859,740,676,612, 548,246,182,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522, 220,156,948,884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194, 130,986,922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960, 896,832,768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934, 870,806,504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844, 780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452, 388,324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894, 400,272,1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526, 160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314] [views:debug,2014-08-19T16:50:00.549,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/801. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",801,active,0} [rebalance:info,2014-08-19T16:50:00.586,ns_1@10.242.238.88:<0.30129.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 744 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:00.586,ns_1@10.242.238.88:<0.30051.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 745 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:00.586,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 744 state to active [rebalance:info,2014-08-19T16:50:00.587,ns_1@10.242.238.88:<0.30129.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 744 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:00.588,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 745 state to active [rebalance:info,2014-08-19T16:50:00.589,ns_1@10.242.238.88:<0.30051.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 745 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:00.589,ns_1@10.242.238.88:<0.30129.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:00.589,ns_1@10.242.238.88:<0.30051.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:00.711,ns_1@10.242.238.88:<0.29974.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 746 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:00.711,ns_1@10.242.238.88:<0.30107.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 999 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:00.711,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 746 state to active [rebalance:info,2014-08-19T16:50:00.712,ns_1@10.242.238.88:<0.29974.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 746 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:00.712,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 999 state to active [rebalance:info,2014-08-19T16:50:00.713,ns_1@10.242.238.88:<0.30107.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 999 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:00.714,ns_1@10.242.238.88:<0.29974.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:00.714,ns_1@10.242.238.88:<0.30107.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:00.724,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 799. Nacking mccouch update. [views:debug,2014-08-19T16:50:00.724,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/799. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.724,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",799,active,0} [ns_server:debug,2014-08-19T16:50:00.725,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,742,614,248,120,976,848,482,354,899,716, 588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664,536, 170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,740,612,246,118, 974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156,948,884, 820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858, 794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768,466, 402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440, 376,312,985,921,857,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969, 841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987,859, 676,548,182] [views:debug,2014-08-19T16:50:00.783,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/799. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.783,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",799,active,0} [rebalance:info,2014-08-19T16:50:00.810,ns_1@10.242.238.88:<0.30185.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 998 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:00.810,ns_1@10.242.238.88:<0.29945.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1001 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:50:00.811,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 998 state to active [rebalance:info,2014-08-19T16:50:00.812,ns_1@10.242.238.88:<0.30185.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 998 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:00.812,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1001 state to active [rebalance:info,2014-08-19T16:50:00.813,ns_1@10.242.238.88:<0.29945.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1001 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:00.813,ns_1@10.242.238.88:<0.30185.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:00.814,ns_1@10.242.238.88:<0.29945.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:00.912,ns_1@10.242.238.88:<0.30030.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1000 state change: {'ns_1@10.242.238.88',active,paused, undefined} [ns_server:info,2014-08-19T16:50:00.912,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 1000 state to active [rebalance:info,2014-08-19T16:50:00.913,ns_1@10.242.238.88:<0.30030.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 1000 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:00.914,ns_1@10.242.238.88:<0.30030.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:00.958,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 797. Nacking mccouch update. [views:debug,2014-08-19T16:50:00.958,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/797. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:00.959,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",797,active,0} [ns_server:debug,2014-08-19T16:50:00.960,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,740,612,246, 118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156,948, 884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922, 858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768, 466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504, 440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844,780,478,414, 350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969, 841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987,859, 676,548,182] [rebalance:info,2014-08-19T16:50:00.970,ns_1@10.242.238.88:<0.31557.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 471 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:00.971,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 471 state to active [rebalance:info,2014-08-19T16:50:00.972,ns_1@10.242.238.88:<0.31557.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 471 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:00.972,ns_1@10.242.238.88:<0.31557.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:01.017,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got 
set_vbucket event for default/797. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.017,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",797,active,0} [rebalance:info,2014-08-19T16:50:01.046,ns_1@10.242.238.88:<0.31389.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 473 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.046,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 473 state to active [rebalance:info,2014-08-19T16:50:01.048,ns_1@10.242.238.88:<0.31389.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 473 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.048,ns_1@10.242.238.88:<0.31389.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.121,ns_1@10.242.238.88:<0.31634.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 470 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.121,ns_1@10.242.238.88:<0.31234.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 475 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.122,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 475 state to active [rebalance:info,2014-08-19T16:50:01.123,ns_1@10.242.238.88:<0.31234.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 475 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:01.123,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 470 state to active [rebalance:info,2014-08-19T16:50:01.124,ns_1@10.242.238.88:<0.31634.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 470 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.124,ns_1@10.242.238.88:<0.31234.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.124,ns_1@10.242.238.88:<0.31634.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:01.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 795. Nacking mccouch update. [views:debug,2014-08-19T16:50:01.193,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/795. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.193,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",795,active,0} [ns_server:debug,2014-08-19T16:50:01.194,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156, 948,884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,921,857,738,674,610,546,244,180,116,972,908,844,780,478, 414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324, 260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969, 841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987,859, 676,548,182] [rebalance:info,2014-08-19T16:50:01.232,ns_1@10.242.238.88:<0.31031.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 477 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.232,ns_1@10.242.238.88:<0.31480.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 472 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.232,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 477 state to active [rebalance:info,2014-08-19T16:50:01.233,ns_1@10.242.238.88:<0.31031.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 477 on ns_1@10.242.238.88 
[ns_server:info,2014-08-19T16:50:01.233,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 472 state to active [rebalance:info,2014-08-19T16:50:01.234,ns_1@10.242.238.88:<0.31480.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 472 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.234,ns_1@10.242.238.88:<0.31031.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.235,ns_1@10.242.238.88:<0.31480.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:01.251,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/795. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.252,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",795,active,0} [rebalance:info,2014-08-19T16:50:01.340,ns_1@10.242.238.88:<0.31311.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 474 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.340,ns_1@10.242.238.88:<0.30877.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 479 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.341,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 474 state to active [rebalance:info,2014-08-19T16:50:01.342,ns_1@10.242.238.88:<0.31311.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 474 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:01.342,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 479 state to active [rebalance:info,2014-08-19T16:50:01.343,ns_1@10.242.238.88:<0.30877.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 479 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.343,ns_1@10.242.238.88:<0.31311.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.344,ns_1@10.242.238.88:<0.30877.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:01.419,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 793. Nacking mccouch update. [views:debug,2014-08-19T16:50:01.419,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/793. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",793,active,0} [ns_server:debug,2014-08-19T16:50:01.421,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156, 948,884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388, 324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400, 272,1022,945,817,762,634,140,996,868,502,374,919,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296, 969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917, 734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865, 682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758, 630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987, 859,676,548,182] [rebalance:info,2014-08-19T16:50:01.426,ns_1@10.242.238.88:<0.31152.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 476 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.426,ns_1@10.242.238.88:<0.30723.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 481 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.426,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 476 state to active [rebalance:info,2014-08-19T16:50:01.427,ns_1@10.242.238.88:<0.31152.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 476 on ns_1@10.242.238.88 
[ns_server:info,2014-08-19T16:50:01.427,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 481 state to active [rebalance:info,2014-08-19T16:50:01.428,ns_1@10.242.238.88:<0.30723.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 481 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.428,ns_1@10.242.238.88:<0.31152.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.429,ns_1@10.242.238.88:<0.30723.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:01.487,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/793. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.487,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",793,active,0} [rebalance:info,2014-08-19T16:50:01.509,ns_1@10.242.238.88:<0.30940.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 478 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.509,ns_1@10.242.238.88:<0.30569.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 483 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.510,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 478 state to active [rebalance:info,2014-08-19T16:50:01.511,ns_1@10.242.238.88:<0.30940.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 478 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:01.511,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 483 state to active [rebalance:info,2014-08-19T16:50:01.512,ns_1@10.242.238.88:<0.30569.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 483 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.512,ns_1@10.242.238.88:<0.30940.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.512,ns_1@10.242.238.88:<0.30569.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.601,ns_1@10.242.238.88:<0.30800.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 480 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.602,ns_1@10.242.238.88:<0.30400.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 485 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.602,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 480 state to active [rebalance:info,2014-08-19T16:50:01.603,ns_1@10.242.238.88:<0.30800.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 480 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:01.603,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 485 state to active [rebalance:info,2014-08-19T16:50:01.604,ns_1@10.242.238.88:<0.30400.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 485 on 
ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.605,ns_1@10.242.238.88:<0.30800.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.605,ns_1@10.242.238.88:<0.30400.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:01.622,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 791. Nacking mccouch update. [views:debug,2014-08-19T16:50:01.622,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/791. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.622,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",791,active,0} [ns_server:debug,2014-08-19T16:50:01.624,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,910,846,782,480,416,352,288,961,897,833,714,650,586,522,220,156, 948,884,820,454,390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986, 922,858,794,492,428,364,300,973,909,845,726,662,598,534,232,168,960,896,832, 768,466,402,338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806, 504,440,376,312,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780, 478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388, 324,260,1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400, 272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372, 917,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813, 758,630,136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578, 212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526, 160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314, 987,859,676,548,182] [views:debug,2014-08-19T16:50:01.681,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for 
default/791. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.681,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",791,active,0} [rebalance:info,2014-08-19T16:50:01.694,ns_1@10.242.238.88:<0.30646.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 482 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.693,ns_1@10.242.238.88:<0.30227.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 487 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.694,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 482 state to active [rebalance:info,2014-08-19T16:50:01.695,ns_1@10.242.238.88:<0.30646.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 482 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:01.695,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 487 state to active [rebalance:info,2014-08-19T16:50:01.696,ns_1@10.242.238.88:<0.30227.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 487 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.697,ns_1@10.242.238.88:<0.30646.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.697,ns_1@10.242.238.88:<0.30227.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:01.756,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 789. Nacking mccouch update. [views:debug,2014-08-19T16:50:01.756,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/789. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.756,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",789,active,0} [ns_server:debug,2014-08-19T16:50:01.758,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,846,480,352,961,897,833,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945, 817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893, 710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841, 658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987,859, 676,548,182,910,782,416,288] [views:debug,2014-08-19T16:50:01.790,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/789. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.790,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",789,active,0} [rebalance:info,2014-08-19T16:50:01.802,ns_1@10.242.238.88:<0.30086.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 489 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.802,ns_1@10.242.238.88:<0.30481.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 484 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.803,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 489 state to active [rebalance:info,2014-08-19T16:50:01.804,ns_1@10.242.238.88:<0.30086.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 489 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:01.804,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 484 state to active [rebalance:info,2014-08-19T16:50:01.805,ns_1@10.242.238.88:<0.30481.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 484 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.806,ns_1@10.242.238.88:<0.30086.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.806,ns_1@10.242.238.88:<0.30481.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:01.865,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 787. Nacking mccouch update. [views:debug,2014-08-19T16:50:01.866,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/787. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.866,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",787,active,0} [ns_server:debug,2014-08-19T16:50:01.868,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,846,480,352,961,897,833,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945, 817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893, 710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841, 658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987, 859,676,548,182,910,782,416,288] [views:debug,2014-08-19T16:50:01.900,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/787. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.900,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",787,active,0} [rebalance:info,2014-08-19T16:50:01.903,ns_1@10.242.238.88:<0.30304.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 486 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:01.903,ns_1@10.242.238.88:<0.31522.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 727 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:01.903,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 486 state to active [rebalance:info,2014-08-19T16:50:01.904,ns_1@10.242.238.88:<0.30304.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 486 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:01.904,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 727 state to active [rebalance:info,2014-08-19T16:50:01.905,ns_1@10.242.238.88:<0.31522.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 727 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:01.906,ns_1@10.242.238.88:<0.30304.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:01.906,ns_1@10.242.238.88:<0.31522.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:01.975,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 785. Nacking mccouch update. [views:debug,2014-08-19T16:50:01.975,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/785. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:01.975,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",785,active,0} [ns_server:debug,2014-08-19T16:50:01.976,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,846,480,352,961,897,833,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945, 817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893, 710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841, 658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314, 987,859,676,548,182,910,782,416,288] [views:debug,2014-08-19T16:50:02.009,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/785. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.009,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",785,active,0} [rebalance:info,2014-08-19T16:50:02.013,ns_1@10.242.238.88:<0.31368.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 729 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.013,ns_1@10.242.238.88:<0.32506.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 488) [ns_server:info,2014-08-19T16:50:02.013,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 729 state to active [rebalance:info,2014-08-19T16:50:02.014,ns_1@10.242.238.88:<0.30164.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:02.015,ns_1@10.242.238.88:<0.31368.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 729 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.015,ns_1@10.242.238.88:<0.31368.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:02.018,ns_1@10.242.238.88:<0.30172.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_488_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:02.018,ns_1@10.242.238.88:<0.30164.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:02.020,ns_1@10.242.238.88:<0.30164.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 488 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.32513.0> [ns_server:info,2014-08-19T16:50:02.021,ns_1@10.242.238.88:<0.32513.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 488 to state replica [ns_server:debug,2014-08-19T16:50:02.060,ns_1@10.242.238.88:<0.32513.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_488 [rebalance:info,2014-08-19T16:50:02.062,ns_1@10.242.238.88:<0.32513.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[488]}, {checkpoints,[{488,1}]}, {name,<<"rebalance_488">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[488]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"488"}]} [rebalance:debug,2014-08-19T16:50:02.063,ns_1@10.242.238.88:<0.32513.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32528.0> [rebalance:info,2014-08-19T16:50:02.064,ns_1@10.242.238.88:<0.32513.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:02.066,ns_1@10.242.238.88:<0.32513.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:02.066,ns_1@10.242.238.88:<0.32513.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:02.067,ns_1@10.242.238.88:<0.30164.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 488 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:02.069,ns_1@10.242.238.88:<0.30172.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[rebalance:info,2014-08-19T16:50:02.073,ns_1@10.242.238.88:<0.32529.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 490) [rebalance:info,2014-08-19T16:50:02.073,ns_1@10.242.238.88:<0.31208.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 731 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.074,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 731 state to active [rebalance:info,2014-08-19T16:50:02.074,ns_1@10.242.238.88:<0.30003.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:50:02.074,ns_1@10.242.238.88:<0.30172.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_488_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:02.075,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 488 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:02.075,ns_1@10.242.238.88:<0.32533.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 488 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:50:02.075,ns_1@10.242.238.88:<0.31208.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 731 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.075,ns_1@10.242.238.88:<0.31208.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:02.077,ns_1@10.242.238.88:<0.30017.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_490_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:02.077,ns_1@10.242.238.88:<0.30003.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:02.079,ns_1@10.242.238.88:<0.30003.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 490 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.32540.0> [ns_server:info,2014-08-19T16:50:02.080,ns_1@10.242.238.88:<0.32540.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 490 to state replica [ns_server:debug,2014-08-19T16:50:02.084,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 783. Nacking mccouch update. [views:debug,2014-08-19T16:50:02.084,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/783. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.084,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",783,active,0} [ns_server:debug,2014-08-19T16:50:02.085,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,846,480,352,961,897,833,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,845,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945, 817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893, 710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841, 658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442, 314,987,859,676,548,182,910,782,416,288] [ns_server:debug,2014-08-19T16:50:02.089,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.090,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:02.090,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.091,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.091,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{488, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:02.098,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 488 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:02.099,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 488) [ns_server:debug,2014-08-19T16:50:02.099,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:02.114,ns_1@10.242.238.88:<0.32540.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_490 [rebalance:info,2014-08-19T16:50:02.116,ns_1@10.242.238.88:<0.32540.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[490]}, {checkpoints,[{490,1}]}, {name,<<"rebalance_490">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[490]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"490"}]} [rebalance:debug,2014-08-19T16:50:02.117,ns_1@10.242.238.88:<0.32540.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32550.0> [rebalance:info,2014-08-19T16:50:02.117,ns_1@10.242.238.88:<0.32540.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:02.119,ns_1@10.242.238.88:<0.32540.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:02.119,ns_1@10.242.238.88:<0.32540.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:02.120,ns_1@10.242.238.88:<0.30003.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 490 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:02.123,ns_1@10.242.238.88:<0.30017.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:02.126,ns_1@10.242.238.88:<0.30017.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_490_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:02.127,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 490 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:02.127,ns_1@10.242.238.88:<0.32554.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 490 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} 
[views:debug,2014-08-19T16:50:02.138,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/783. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.138,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",783,active,0} [rebalance:info,2014-08-19T16:50:02.138,ns_1@10.242.238.88:<0.31613.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 726 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.138,ns_1@10.242.238.88:<0.30996.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 733 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.139,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 726 state to active [ns_server:debug,2014-08-19T16:50:02.140,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:02.140,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{490, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:02.141,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:02.141,ns_1@10.242.238.88:<0.31613.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 726 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:02.141,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 733 state to active [ns_server:debug,2014-08-19T16:50:02.142,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.142,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:02.146,ns_1@10.242.238.88:<0.30996.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 733 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.146,ns_1@10.242.238.88:<0.31613.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.147,ns_1@10.242.238.88:<0.30996.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.152,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 490 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:02.153,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 490) [ns_server:debug,2014-08-19T16:50:02.153,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:02.239,ns_1@10.242.238.88:<0.31445.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 728 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.239,ns_1@10.242.238.88:<0.30842.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 735 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.239,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 728 state to active [rebalance:info,2014-08-19T16:50:02.240,ns_1@10.242.238.88:<0.31445.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 728 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:02.241,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 735 state to active [rebalance:info,2014-08-19T16:50:02.242,ns_1@10.242.238.88:<0.30842.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 735 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.242,ns_1@10.242.238.88:<0.31445.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.242,ns_1@10.242.238.88:<0.30842.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:02.295,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 781. Nacking mccouch update. [views:debug,2014-08-19T16:50:02.296,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/781. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.296,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",781,active,0} [ns_server:debug,2014-08-19T16:50:02.297,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,724,596,230,958,830,464,336,881,698, 570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829,646, 518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722,594, 228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542,176, 904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252,124, 980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928,800, 434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510,382, 1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458,330, 875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278,951, 823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847,664, 536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,846,480,352,961,897,833,714,650,586,522,220,156,948,884,820,454, 390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402, 338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296, 969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917, 789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813, 758,630,136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654, 526,160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602, 236,108,964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550, 184,912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132, 988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936, 808,442,314,987,859,676,548,182,910,782,416,288] [views:debug,2014-08-19T16:50:02.346,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/781. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.347,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",781,active,0} [rebalance:info,2014-08-19T16:50:02.356,ns_1@10.242.238.88:<0.31290.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 730 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.356,ns_1@10.242.238.88:<0.30702.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 737 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.356,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 730 state to active [rebalance:info,2014-08-19T16:50:02.357,ns_1@10.242.238.88:<0.31290.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 730 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:02.358,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 737 state to active [rebalance:info,2014-08-19T16:50:02.358,ns_1@10.242.238.88:<0.30702.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 737 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.359,ns_1@10.242.238.88:<0.31290.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.359,ns_1@10.242.238.88:<0.30702.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.473,ns_1@10.242.238.88:<0.30533.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 739 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.473,ns_1@10.242.238.88:<0.31087.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 732 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.474,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 739 state to active [rebalance:info,2014-08-19T16:50:02.475,ns_1@10.242.238.88:<0.30533.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 739 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:02.475,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 732 state to active [rebalance:info,2014-08-19T16:50:02.476,ns_1@10.242.238.88:<0.31087.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 732 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.476,ns_1@10.242.238.88:<0.30533.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.476,ns_1@10.242.238.88:<0.31087.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:02.496,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 779. Nacking mccouch update. [views:debug,2014-08-19T16:50:02.496,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/779. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.497,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",779,active,0} [ns_server:debug,2014-08-19T16:50:02.498,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,722, 594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670,542, 176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618,252, 124,980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005,928, 800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876,510, 382,1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824,458, 330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406,278, 951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482,354, 899,716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975,847, 664,536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795,740, 612,246,118,974,846,480,352,897,714,586,220,948,884,820,454,390,326,262,1012, 999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428,364,300,973, 909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883, 819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857, 793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831, 712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,869,686, 558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634, 140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216, 1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682,554,188,916, 788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864, 498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812, 446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418,290, 963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987,859, 676,548,182,910,782,416,288,961,833,650,522,156] [views:debug,2014-08-19T16:50:02.556,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/779. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.556,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",779,active,0} [rebalance:info,2014-08-19T16:50:02.574,ns_1@10.242.238.88:<0.30919.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 734 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.574,ns_1@10.242.238.88:<0.30360.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 741 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.574,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 734 state to active [rebalance:info,2014-08-19T16:50:02.575,ns_1@10.242.238.88:<0.30919.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 734 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:02.575,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 741 state to active [rebalance:info,2014-08-19T16:50:02.576,ns_1@10.242.238.88:<0.30360.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 741 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.577,ns_1@10.242.238.88:<0.30919.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.577,ns_1@10.242.238.88:<0.30360.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.691,ns_1@10.242.238.88:<0.32647.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 743) [rebalance:info,2014-08-19T16:50:02.691,ns_1@10.242.238.88:<0.30779.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 736 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.691,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 736 state to active [rebalance:info,2014-08-19T16:50:02.692,ns_1@10.242.238.88:<0.30206.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:02.693,ns_1@10.242.238.88:<0.30779.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 736 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.693,ns_1@10.242.238.88:<0.30779.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:02.695,ns_1@10.242.238.88:<0.30214.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_743_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:02.696,ns_1@10.242.238.88:<0.30206.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:02.698,ns_1@10.242.238.88:<0.30206.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 743 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.32654.0> [ns_server:info,2014-08-19T16:50:02.699,ns_1@10.242.238.88:<0.32654.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 743 to state replica [ns_server:debug,2014-08-19T16:50:02.733,ns_1@10.242.238.88:<0.32654.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_743 [rebalance:info,2014-08-19T16:50:02.735,ns_1@10.242.238.88:<0.32654.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[743]}, {checkpoints,[{743,1}]}, {name,<<"rebalance_743">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[743]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"743"}]} [rebalance:debug,2014-08-19T16:50:02.735,ns_1@10.242.238.88:<0.32654.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32655.0> [rebalance:info,2014-08-19T16:50:02.736,ns_1@10.242.238.88:<0.32654.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:02.738,ns_1@10.242.238.88:<0.32654.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:02.738,ns_1@10.242.238.88:<0.32654.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:02.739,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 777. Nacking mccouch update. [views:debug,2014-08-19T16:50:02.739,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/777. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.739,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",777,active,0} [rebalance:info,2014-08-19T16:50:02.739,ns_1@10.242.238.88:<0.30206.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 743 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:02.740,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,720,592,226,954,826,460,332,877,694,566,200,1005, 928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148,876, 510,382,1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952,824, 458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772,406, 278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848,482, 354,899,716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302,975, 847,664,536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,714,586,220,948,884,820,454,390,326,262, 1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428,364,300, 973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947, 883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921, 857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895, 831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,869, 686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762, 634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582, 216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530, 164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240, 112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394, 266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987, 859,676,548,182,910,782,416,288,961,833,650,522,156] [rebalance:info,2014-08-19T16:50:02.741,ns_1@10.242.238.88:<0.30625.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 738 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:debug,2014-08-19T16:50:02.741,ns_1@10.242.238.88:<0.30214.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:02.741,ns_1@10.242.238.88:<0.32656.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 745) [ns_server:info,2014-08-19T16:50:02.742,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed 
vbucket 738 state to active [rebalance:info,2014-08-19T16:50:02.742,ns_1@10.242.238.88:<0.30051.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:02.742,ns_1@10.242.238.88:<0.30625.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 738 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.743,ns_1@10.242.238.88:<0.30625.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:02.745,ns_1@10.242.238.88:<0.30214.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_743_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:02.745,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 743 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:02.745,ns_1@10.242.238.88:<0.32665.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 743 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:02.745,ns_1@10.242.238.88:<0.30059.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_745_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:02.745,ns_1@10.242.238.88:<0.30051.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:02.748,ns_1@10.242.238.88:<0.30051.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 745 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.32667.0> [ns_server:info,2014-08-19T16:50:02.748,ns_1@10.242.238.88:<0.32667.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 745 to state replica [ns_server:debug,2014-08-19T16:50:02.760,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.761,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:02.761,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.762,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.761,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{743, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:02.773,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 743 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:02.774,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 743) [ns_server:debug,2014-08-19T16:50:02.774,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:02.784,ns_1@10.242.238.88:<0.32667.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_745 [rebalance:info,2014-08-19T16:50:02.785,ns_1@10.242.238.88:<0.32667.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[745]}, {checkpoints,[{745,1}]}, {name,<<"rebalance_745">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[745]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"745"}]} [rebalance:debug,2014-08-19T16:50:02.786,ns_1@10.242.238.88:<0.32667.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.32677.0> [rebalance:info,2014-08-19T16:50:02.787,ns_1@10.242.238.88:<0.32667.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:02.788,ns_1@10.242.238.88:<0.32667.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:02.789,ns_1@10.242.238.88:<0.32667.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:02.790,ns_1@10.242.238.88:<0.30051.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 745 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:02.791,ns_1@10.242.238.88:<0.31578.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 981 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.791,ns_1@10.242.238.88:<0.30446.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 740 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:debug,2014-08-19T16:50:02.791,ns_1@10.242.238.88:<0.30059.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:02.792,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 981 state to active [rebalance:info,2014-08-19T16:50:02.793,ns_1@10.242.238.88:<0.31578.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 981 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:02.793,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 740 state to active [rebalance:info,2014-08-19T16:50:02.794,ns_1@10.242.238.88:<0.30446.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 740 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.794,ns_1@10.242.238.88:<0.31578.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.794,ns_1@10.242.238.88:<0.30446.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:02.795,ns_1@10.242.238.88:<0.30059.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_745_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:02.795,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 745 state change 
[{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:02.795,ns_1@10.242.238.88:<0.32689.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 745 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:50:02.798,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/777. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.798,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",777,active,0} [ns_server:debug,2014-08-19T16:50:02.807,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.808,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{745, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:02.808,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:02.808,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:02.809,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:02.815,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 745 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:02.815,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 745) [ns_server:debug,2014-08-19T16:50:02.816,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:02.862,ns_1@10.242.238.88:<0.30283.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 742 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.862,ns_1@10.242.238.88:<0.31424.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 983 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.862,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 742 state to active [rebalance:info,2014-08-19T16:50:02.863,ns_1@10.242.238.88:<0.30283.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 742 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:02.863,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 983 state to active [rebalance:info,2014-08-19T16:50:02.864,ns_1@10.242.238.88:<0.31424.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 983 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.865,ns_1@10.242.238.88:<0.30283.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.865,ns_1@10.242.238.88:<0.31424.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:02.946,ns_1@10.242.238.88:<0.31269.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 985 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:02.946,ns_1@10.242.238.88:<0.32721.0>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 744) [ns_server:info,2014-08-19T16:50:02.946,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 985 state to active [rebalance:info,2014-08-19T16:50:02.946,ns_1@10.242.238.88:<0.30129.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:02.947,ns_1@10.242.238.88:<0.31269.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 985 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.948,ns_1@10.242.238.88:<0.31269.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:02.950,ns_1@10.242.238.88:<0.30137.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_744_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:02.950,ns_1@10.242.238.88:<0.30129.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:02.952,ns_1@10.242.238.88:<0.30129.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 744 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.32728.0> [ns_server:info,2014-08-19T16:50:02.953,ns_1@10.242.238.88:<0.32728.0>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 744 to state replica [ns_server:debug,2014-08-19T16:50:02.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_980_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_980_'ns_1@10.242.238.90'">>}]}, {move_state,470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_470_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_470_'ns_1@10.242.238.91'">>}]}, {move_state,726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_726_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_726_'ns_1@10.242.238.91'">>}]}, {move_state,981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_981_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_981_'ns_1@10.242.238.90'">>}]}, {move_state,471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_471_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_471_'ns_1@10.242.238.91'">>}]}, {move_state,727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_727_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_727_'ns_1@10.242.238.91'">>}]}, {move_state,982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_982_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_982_'ns_1@10.242.238.90'">>}]}, {move_state,472, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_472_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_472_'ns_1@10.242.238.91'">>}]}, {move_state,728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_728_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_728_'ns_1@10.242.238.91'">>}]}, {move_state,983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_983_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_983_'ns_1@10.242.238.90'">>}]}, {move_state,473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_473_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_473_'ns_1@10.242.238.91'">>}]}, {move_state,729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_729_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_729_'ns_1@10.242.238.91'">>}]}, {move_state,984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_984_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_984_'ns_1@10.242.238.90'">>}]}, {move_state,474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_474_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_474_'ns_1@10.242.238.91'">>}]}, {move_state,730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_730_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_730_'ns_1@10.242.238.91'">>}]}, {move_state,985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_985_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_985_'ns_1@10.242.238.90'">>}]}, {move_state,475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_475_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_475_'ns_1@10.242.238.91'">>}]}, {move_state,731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_731_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_731_'ns_1@10.242.238.91'">>}]}, {move_state,986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_986_'ns_1@10.242.238.91'">>}, 
{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_986_'ns_1@10.242.238.90'">>}]}, {move_state,476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_476_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_476_'ns_1@10.242.238.91'">>}]}, {move_state,732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_732_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_732_'ns_1@10.242.238.91'">>}]}, {move_state,987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_987_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_987_'ns_1@10.242.238.90'">>}]}, {move_state,477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_477_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_477_'ns_1@10.242.238.91'">>}]}, {move_state,733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_733_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_733_'ns_1@10.242.238.91'">>}]}, {move_state,988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_988_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_988_'ns_1@10.242.238.90'">>}]}, {move_state,478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_478_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_478_'ns_1@10.242.238.91'">>}]}, {move_state,734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_734_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_734_'ns_1@10.242.238.91'">>}]}, {move_state,989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_989_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_989_'ns_1@10.242.238.90'">>}]}, {move_state,479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_479_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_479_'ns_1@10.242.238.91'">>}]}, {move_state,735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_735_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_735_'ns_1@10.242.238.91'">>}]}, {move_state,990, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_990_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_990_'ns_1@10.242.238.90'">>}]}, {move_state,480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_480_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_480_'ns_1@10.242.238.91'">>}]}, {move_state,736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_736_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_736_'ns_1@10.242.238.91'">>}]}, {move_state,991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_991_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_991_'ns_1@10.242.238.90'">>}]}, {move_state,481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_481_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_481_'ns_1@10.242.238.91'">>}]}, {move_state,737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_737_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_737_'ns_1@10.242.238.91'">>}]}, {move_state,992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_992_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_992_'ns_1@10.242.238.90'">>}]}, {move_state,482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_482_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_482_'ns_1@10.242.238.91'">>}]}, {move_state,738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_738_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_738_'ns_1@10.242.238.91'">>}]}, {move_state,993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_993_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_993_'ns_1@10.242.238.90'">>}]}, {move_state,483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_483_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_483_'ns_1@10.242.238.91'">>}]}, {move_state,739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_739_'ns_1@10.242.238.90'">>}, 
{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_739_'ns_1@10.242.238.91'">>}]}, {move_state,994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_994_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_994_'ns_1@10.242.238.90'">>}]}, {move_state,484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_484_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_484_'ns_1@10.242.238.91'">>}]}, {move_state,740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_740_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_740_'ns_1@10.242.238.91'">>}]}, {move_state,995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_995_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_995_'ns_1@10.242.238.90'">>}]}, {move_state,485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_485_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_485_'ns_1@10.242.238.91'">>}]}, {move_state,741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_741_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_741_'ns_1@10.242.238.91'">>}]}, {move_state,996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_996_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_996_'ns_1@10.242.238.90'">>}]}, {move_state,486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_486_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_486_'ns_1@10.242.238.91'">>}]}, {move_state,742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_742_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_742_'ns_1@10.242.238.91'">>}]}, {move_state,997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_997_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_997_'ns_1@10.242.238.90'">>}]}, {move_state,487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_487_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_487_'ns_1@10.242.238.91'">>}]}, {move_state,998, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_998_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_998_'ns_1@10.242.238.90'">>}]}, {move_state,744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_744_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_744_'ns_1@10.242.238.91'">>}]}, {move_state,999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_999_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_999_'ns_1@10.242.238.90'">>}]}, {move_state,489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_489_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_489_'ns_1@10.242.238.91'">>}]}, {move_state,1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1000_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1000_'ns_1@10.242.238.90'">>}]}, {move_state,746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_746_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_746_'ns_1@10.242.238.91'">>}]}, {move_state,1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_1001_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_1001_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:50:02.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 980, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 470, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 726, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 981, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 471, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 727, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 982, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 472, 
[{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 728, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 983, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 473, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.964,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 775. Nacking mccouch update. [views:debug,2014-08-19T16:50:02.964,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/775. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.965,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",775,active,0} [ns_server:debug,2014-08-19T16:50:02.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 729, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 984, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 474, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.966,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,718,590,224,952, 824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900,772, 406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976,848, 482,354,899,716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430,302, 975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000,923, 795,740,612,246,118,974,846,480,352,897,714,586,220,948,884,820,454,390,326, 262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428,364, 300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338,274, 947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985, 921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997, 869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817, 762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710, 582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841,658, 
530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606, 240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682,554, 188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136, 992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314, 987,859,676,548,182,910,782,416,288,961,833,650,522,156] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 730, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 985, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 475, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 731, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 986, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 476, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 732, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 987, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 477, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 733, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 988, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 478, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 734, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 989, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 479, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 735, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 990, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 480, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 736, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 991, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 481, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 737, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 992, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 482, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 738, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 993, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 483, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 739, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 994, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 484, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 740, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 995, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 485, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 741, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:02.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 996, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 486, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 742, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 997, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [rebalance:info,2014-08-19T16:50:02.979,ns_1@10.242.238.88:<0.14.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 746) [ns_server:debug,2014-08-19T16:50:02.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 487, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [rebalance:info,2014-08-19T16:50:02.979,ns_1@10.242.238.88:<0.31052.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 987 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:02.980,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 987 state to active [ns_server:debug,2014-08-19T16:50:02.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 998, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [rebalance:info,2014-08-19T16:50:02.980,ns_1@10.242.238.88:<0.29974.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:02.981,ns_1@10.242.238.88:<0.31052.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 987 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:02.981,ns_1@10.242.238.88:<0.31052.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:02.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 744, [{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 999, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:info,2014-08-19T16:50:02.983,ns_1@10.242.238.88:<0.29982.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_746_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:02.983,ns_1@10.242.238.88:<0.29974.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:02.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 489, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:02.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1000, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:02.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 746, [{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:02.985,ns_1@10.242.238.88:<0.29974.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 746 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.28.1> [ns_server:debug,2014-08-19T16:50:02.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 1001, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:info,2014-08-19T16:50:02.986,ns_1@10.242.238.88:<0.28.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 746 to state replica [ns_server:debug,2014-08-19T16:50:02.987,ns_1@10.242.238.88:<0.32728.0>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_744 [rebalance:info,2014-08-19T16:50:02.989,ns_1@10.242.238.88:<0.32728.0>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[744]}, {checkpoints,[{744,1}]}, {name,<<"rebalance_744">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[744]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"744"}]} [rebalance:debug,2014-08-19T16:50:02.990,ns_1@10.242.238.88:<0.32728.0>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.29.1> [rebalance:info,2014-08-19T16:50:02.991,ns_1@10.242.238.88:<0.32728.0>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:02.993,ns_1@10.242.238.88:<0.32728.0>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:02.993,ns_1@10.242.238.88:<0.32728.0>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:02.994,ns_1@10.242.238.88:<0.30129.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 744 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:02.995,ns_1@10.242.238.88:<0.30137.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [views:debug,2014-08-19T16:50:02.998,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/775. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:02.998,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",775,active,0} [ns_server:info,2014-08-19T16:50:02.999,ns_1@10.242.238.88:<0.30137.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_744_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:02.999,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 744 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:02.999,ns_1@10.242.238.88:<0.33.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 744 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:03.012,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.013,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.013,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{744, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:03.014,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:03.015,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.023,ns_1@10.242.238.88:<0.28.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_746 [rebalance:info,2014-08-19T16:50:03.024,ns_1@10.242.238.88:<0.28.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[746]}, {checkpoints,[{746,1}]}, {name,<<"rebalance_746">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[746]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"746"}]} [rebalance:debug,2014-08-19T16:50:03.025,ns_1@10.242.238.88:<0.28.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.42.1> [rebalance:info,2014-08-19T16:50:03.026,ns_1@10.242.238.88:<0.28.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:03.027,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 744 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:03.027,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 744) [rebalance:debug,2014-08-19T16:50:03.028,ns_1@10.242.238.88:<0.28.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:03.028,ns_1@10.242.238.88:<0.28.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:03.028,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:03.029,ns_1@10.242.238.88:<0.29974.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 746 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:03.031,ns_1@10.242.238.88:<0.29982.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:03.034,ns_1@10.242.238.88:<0.29982.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_746_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.034,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 746 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:03.034,ns_1@10.242.238.88:<0.47.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 746 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:03.045,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.046,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.046,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{746, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:03.046,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:50:03.047,ns_1@10.242.238.88:<0.31655.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 980 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:03.047,ns_1@10.242.238.88:<0.30898.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 989 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:50:03.047,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:03.048,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 980 state to active [rebalance:info,2014-08-19T16:50:03.049,ns_1@10.242.238.88:<0.31655.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 980 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:03.049,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 989 state to active [rebalance:info,2014-08-19T16:50:03.050,ns_1@10.242.238.88:<0.30898.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 989 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.050,ns_1@10.242.238.88:<0.31655.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:03.050,ns_1@10.242.238.88:<0.30898.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:03.055,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 746 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:03.055,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 746) [ns_server:debug,2014-08-19T16:50:03.056,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:03.109,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 773. Nacking mccouch update. [views:debug,2014-08-19T16:50:03.109,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/773. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",773,active,0} [ns_server:debug,2014-08-19T16:50:03.111,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,716,588,222,950,822,456,328,873,690,562,196,1001,924,796,430, 302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506,378,1000, 923,795,740,612,246,118,974,846,480,352,897,714,586,220,948,884,820,454,390, 326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428, 364,300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945, 817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893, 710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841, 658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442, 314,987,859,676,548,182,910,782,416,288,961,833,650,522,156] [rebalance:info,2014-08-19T16:50:03.155,ns_1@10.242.238.88:<0.30744.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 991 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:03.155,ns_1@10.242.238.88:<0.31501.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 982 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:03.155,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 991 state to active [rebalance:info,2014-08-19T16:50:03.156,ns_1@10.242.238.88:<0.30744.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 991 on ns_1@10.242.238.88 
[ns_server:info,2014-08-19T16:50:03.157,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 982 state to active [rebalance:info,2014-08-19T16:50:03.158,ns_1@10.242.238.88:<0.31501.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 982 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.158,ns_1@10.242.238.88:<0.30744.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:03.158,ns_1@10.242.238.88:<0.31501.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:03.168,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/773. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.168,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",773,active,0} [rebalance:info,2014-08-19T16:50:03.222,ns_1@10.242.238.88:<0.30590.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 993 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:03.222,ns_1@10.242.238.88:<0.31347.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 984 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:03.222,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 993 state to active [rebalance:info,2014-08-19T16:50:03.223,ns_1@10.242.238.88:<0.30590.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 993 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:03.224,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 984 state to active [rebalance:info,2014-08-19T16:50:03.225,ns_1@10.242.238.88:<0.31347.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 984 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.225,ns_1@10.242.238.88:<0.30590.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:03.225,ns_1@10.242.238.88:<0.31347.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:03.243,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 771. Nacking mccouch update. [views:debug,2014-08-19T16:50:03.243,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/771. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.244,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",771,active,0} [ns_server:debug,2014-08-19T16:50:03.245,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,456,328,873,690,562,196,1001,924,796, 430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506,378, 1000,923,795,740,612,246,118,974,846,480,352,897,714,586,220,948,884,820,454, 390,326,262,1012,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492, 428,364,300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402, 338,274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376, 312,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350, 286,959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296, 969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917, 789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813, 758,630,136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654, 526,160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602, 236,108,964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550, 184,912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132, 988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936, 808,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156] [views:debug,2014-08-19T16:50:03.277,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/771. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.277,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",771,active,0} [rebalance:info,2014-08-19T16:50:03.331,ns_1@10.242.238.88:<0.31187.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 986 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:03.331,ns_1@10.242.238.88:<0.30425.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 995 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:03.331,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 986 state to active [rebalance:info,2014-08-19T16:50:03.332,ns_1@10.242.238.88:<0.31187.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 986 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:03.333,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 995 state to active [rebalance:info,2014-08-19T16:50:03.333,ns_1@10.242.238.88:<0.30425.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 995 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.334,ns_1@10.242.238.88:<0.31187.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:03.334,ns_1@10.242.238.88:<0.30425.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:03.354,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 769. Nacking mccouch update. [views:debug,2014-08-19T16:50:03.354,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/769. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.354,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",769,active,0} [ns_server:debug,2014-08-19T16:50:03.355,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,456,328,873,690,562,196,1001,924,796, 430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506,378, 1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,454, 326,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428,364,300, 973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947, 883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921, 857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895, 831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997,869, 686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762, 634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582, 216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530, 164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240, 112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394, 266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987, 859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012] [views:debug,2014-08-19T16:50:03.412,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/769. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.413,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",769,active,0} [rebalance:info,2014-08-19T16:50:03.415,ns_1@10.242.238.88:<0.30262.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 997 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:03.415,ns_1@10.242.238.88:<0.30975.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 988 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:03.415,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 997 state to active [rebalance:info,2014-08-19T16:50:03.416,ns_1@10.242.238.88:<0.30262.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 997 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:03.417,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 988 state to active [rebalance:info,2014-08-19T16:50:03.417,ns_1@10.242.238.88:<0.30975.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 988 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.418,ns_1@10.242.238.88:<0.30262.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:03.418,ns_1@10.242.238.88:<0.30975.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:03.488,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 767. Nacking mccouch update. [views:debug,2014-08-19T16:50:03.488,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/767. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.488,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",767,active,0} [ns_server:debug,2014-08-19T16:50:03.489,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 454,326,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428,364, 300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338,274, 947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985, 921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959, 895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010,997, 869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817, 762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710, 582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841,658, 530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606, 240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682,554, 188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136, 992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314, 987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012] [rebalance:info,2014-08-19T16:50:03.508,ns_1@10.242.238.88:<0.30821.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 990 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:03.508,ns_1@10.242.238.88:<0.158.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 999) [ns_server:info,2014-08-19T16:50:03.509,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 990 state to active [rebalance:info,2014-08-19T16:50:03.509,ns_1@10.242.238.88:<0.30107.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.510,ns_1@10.242.238.88:<0.30821.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 990 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.511,ns_1@10.242.238.88:<0.30821.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:03.513,ns_1@10.242.238.88:<0.30115.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_999_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.513,ns_1@10.242.238.88:<0.30107.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:03.515,ns_1@10.242.238.88:<0.30107.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 999 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.165.1> [ns_server:info,2014-08-19T16:50:03.516,ns_1@10.242.238.88:<0.165.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 999 to state replica [views:debug,2014-08-19T16:50:03.522,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/767. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.522,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",767,active,0} [ns_server:debug,2014-08-19T16:50:03.550,ns_1@10.242.238.88:<0.165.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_999 [rebalance:info,2014-08-19T16:50:03.552,ns_1@10.242.238.88:<0.165.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[999]}, {checkpoints,[{999,1}]}, {name,<<"rebalance_999">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[999]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"999"}]} [rebalance:debug,2014-08-19T16:50:03.552,ns_1@10.242.238.88:<0.165.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.166.1> [rebalance:info,2014-08-19T16:50:03.553,ns_1@10.242.238.88:<0.165.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:03.555,ns_1@10.242.238.88:<0.165.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:03.555,ns_1@10.242.238.88:<0.165.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:03.556,ns_1@10.242.238.88:<0.30107.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 999 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:03.558,ns_1@10.242.238.88:<0.30115.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:03.558,ns_1@10.242.238.88:<0.30667.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 992 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:03.558,ns_1@10.242.238.88:<0.173.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1001) [ns_server:info,2014-08-19T16:50:03.559,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 992 state to active 
[rebalance:info,2014-08-19T16:50:03.559,ns_1@10.242.238.88:<0.29945.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.560,ns_1@10.242.238.88:<0.30667.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 992 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.560,ns_1@10.242.238.88:<0.30667.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:03.561,ns_1@10.242.238.88:<0.30115.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_999_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.561,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 999 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:03.561,ns_1@10.242.238.88:<0.183.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 999 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:50:03.562,ns_1@10.242.238.88:<0.29955.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1001_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.562,ns_1@10.242.238.88:<0.29945.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:03.564,ns_1@10.242.238.88:<0.29945.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1001 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.192.1> [ns_server:info,2014-08-19T16:50:03.565,ns_1@10.242.238.88:<0.192.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1001 to state replica [ns_server:debug,2014-08-19T16:50:03.579,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.580,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.581,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{999, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:03.581,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.583,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:03.590,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 999 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:03.591,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 999) [ns_server:debug,2014-08-19T16:50:03.593,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:03.601,ns_1@10.242.238.88:<0.192.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1001 [rebalance:info,2014-08-19T16:50:03.602,ns_1@10.242.238.88:<0.192.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1001]}, {checkpoints,[{1001,1}]}, {name,<<"rebalance_1001">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1001]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1001"}]} [rebalance:debug,2014-08-19T16:50:03.603,ns_1@10.242.238.88:<0.192.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.202.1> [rebalance:info,2014-08-19T16:50:03.604,ns_1@10.242.238.88:<0.192.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:03.606,ns_1@10.242.238.88:<0.192.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:03.606,ns_1@10.242.238.88:<0.192.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:03.607,ns_1@10.242.238.88:<0.29945.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1001 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:03.609,ns_1@10.242.238.88:<0.29955.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:03.609,ns_1@10.242.238.88:<0.30512.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 994 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:03.610,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 994 state to active [rebalance:info,2014-08-19T16:50:03.610,ns_1@10.242.238.88:<0.30512.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 994 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.611,ns_1@10.242.238.88:<0.30512.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:03.612,ns_1@10.242.238.88:<0.29955.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1001_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.612,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1001 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:03.612,ns_1@10.242.238.88:<0.210.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 1001 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:03.615,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 765. Nacking mccouch update. [views:debug,2014-08-19T16:50:03.615,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/765. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.615,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",765,active,0} [ns_server:debug,2014-08-19T16:50:03.617,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428, 364,300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,452,388,324,260,1010, 997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945, 817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893, 710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296,969,841, 658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734, 606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442, 314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012] [ns_server:debug,2014-08-19T16:50:03.626,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.627,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.627,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:03.628,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1001, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:03.628,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:03.635,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1001 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:03.636,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1001) [ns_server:debug,2014-08-19T16:50:03.637,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:50:03.674,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/765. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.674,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",765,active,0} [rebalance:info,2014-08-19T16:50:03.768,ns_1@10.242.238.88:<0.30339.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 996 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:03.768,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 996 state to active [rebalance:info,2014-08-19T16:50:03.769,ns_1@10.242.238.88:<0.30339.0>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 996 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:03.769,ns_1@10.242.238.88:<0.30339.0>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:03.818,ns_1@10.242.238.88:<0.238.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 998) [rebalance:info,2014-08-19T16:50:03.818,ns_1@10.242.238.88:<0.239.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 1000) [rebalance:info,2014-08-19T16:50:03.818,ns_1@10.242.238.88:<0.241.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 487) [rebalance:info,2014-08-19T16:50:03.818,ns_1@10.242.238.88:<0.240.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 489) [rebalance:info,2014-08-19T16:50:03.819,ns_1@10.242.238.88:<0.242.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 471) [rebalance:info,2014-08-19T16:50:03.819,ns_1@10.242.238.88:<0.243.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 477) [rebalance:info,2014-08-19T16:50:03.819,ns_1@10.242.238.88:<0.244.1>:janitor_agent:wait_index_updated:558]default: 
Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 473) [rebalance:info,2014-08-19T16:50:03.819,ns_1@10.242.238.88:<0.245.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 475) [rebalance:info,2014-08-19T16:50:03.819,ns_1@10.242.238.88:<0.246.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 470) [rebalance:info,2014-08-19T16:50:03.819,ns_1@10.242.238.88:<0.30227.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.819,ns_1@10.242.238.88:<0.247.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 479) [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.30086.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.30185.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.31389.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.248.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 472) [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.31557.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.31031.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.249.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 474) [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.31234.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.820,ns_1@10.242.238.88:<0.250.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 483) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.251.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 481) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.31634.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.252.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 485) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.253.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 476) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.254.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 478) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.255.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 482) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.30877.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.256.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 727) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.31480.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.257.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 729) [rebalance:info,2014-08-19T16:50:03.821,ns_1@10.242.238.88:<0.258.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 480) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.30030.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.259.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 733) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.260.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 484) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.261.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 731) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.262.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 735) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.263.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 486) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.31311.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.264.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 726) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.265.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 737) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.266.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 728) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.267.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 741) [rebalance:info,2014-08-19T16:50:03.822,ns_1@10.242.238.88:<0.30569.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.269.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 739) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.268.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 730) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.30723.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.270.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 732) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.31522.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.271.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 981) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.272.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 734) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.31152.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.273.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 985) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.30400.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.31368.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.30996.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.274.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 983) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.31208.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.275.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 738) [rebalance:info,2014-08-19T16:50:03.823,ns_1@10.242.238.88:<0.30940.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.277.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 987) [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30842.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30646.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.278.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 736) [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30360.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30800.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.279.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 740) [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.280.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 991) [ns_server:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30235.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_487_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.31445.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.282.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 989) [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30481.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30227.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.283.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 742) [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.30304.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:03.824,ns_1@10.242.238.88:<0.284.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 993) [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.285.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 982) [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.286.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 980) [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.30702.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.31613.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.288.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 995) [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.289.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 984) [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.30533.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.290.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 997) [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.292.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 986) [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.291.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 988) [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.31290.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.31087.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.825,ns_1@10.242.238.88:<0.293.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 990) [ns_server:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.30193.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_998_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.295.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 994) [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.30919.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.30185.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.30625.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.297.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 992) [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.298.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 996) [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.30779.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.30094.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_489_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.826,ns_1@10.242.238.88:<0.31578.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.30086.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.30446.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.31269.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.31424.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.30283.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.31052.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.30744.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.827,ns_1@10.242.238.88:<0.31187.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.828,ns_1@10.242.238.88:<0.30898.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.828,ns_1@10.242.238.88:<0.30590.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.828,ns_1@10.242.238.88:<0.30975.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.30262.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.31347.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.31565.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_471_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.30425.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.31557.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.31501.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.31397.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_473_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.829,ns_1@10.242.238.88:<0.31655.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.830,ns_1@10.242.238.88:<0.31389.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.830,ns_1@10.242.238.88:<0.30821.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.830,ns_1@10.242.238.88:<0.30512.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.830,ns_1@10.242.238.88:<0.30339.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:03.830,ns_1@10.242.238.88:<0.30667.0>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:03.832,ns_1@10.242.238.88:<0.31039.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_477_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.833,ns_1@10.242.238.88:<0.31031.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.837,ns_1@10.242.238.88:<0.31242.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_475_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.837,ns_1@10.242.238.88:<0.31234.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.837,ns_1@10.242.238.88:<0.31642.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_470_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.838,ns_1@10.242.238.88:<0.31634.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.838,ns_1@10.242.238.88:<0.30885.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_479_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.838,ns_1@10.242.238.88:<0.30877.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.839,ns_1@10.242.238.88:<0.30038.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1000_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.839,ns_1@10.242.238.88:<0.30030.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.842,ns_1@10.242.238.88:<0.31488.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_472_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.842,ns_1@10.242.238.88:<0.31480.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.845,ns_1@10.242.238.88:<0.31319.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_474_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.845,ns_1@10.242.238.88:<0.31311.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.846,ns_1@10.242.238.88:<0.30577.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_483_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.846,ns_1@10.242.238.88:<0.30569.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.847,ns_1@10.242.238.88:<0.30731.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_481_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.847,ns_1@10.242.238.88:<0.30723.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.848,ns_1@10.242.238.88:<0.31160.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': 
[<<"replication_building_476_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.848,ns_1@10.242.238.88:<0.31152.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.850,ns_1@10.242.238.88:<0.31530.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_727_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.850,ns_1@10.242.238.88:<0.31522.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.851,ns_1@10.242.238.88:<0.30408.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_485_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.851,ns_1@10.242.238.88:<0.30400.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.852,ns_1@10.242.238.88:<0.31376.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_729_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.852,ns_1@10.242.238.88:<0.31368.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.853,ns_1@10.242.238.88:<0.30850.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_735_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:50:03.853,ns_1@10.242.238.88:<0.31004.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_733_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.853,ns_1@10.242.238.88:<0.30842.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.853,ns_1@10.242.238.88:<0.30996.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.853,ns_1@10.242.238.88:<0.30654.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_482_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.854,ns_1@10.242.238.88:<0.30646.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.854,ns_1@10.242.238.88:<0.30808.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_480_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.854,ns_1@10.242.238.88:<0.30800.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.855,ns_1@10.242.238.88:<0.30948.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_478_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.855,ns_1@10.242.238.88:<0.30940.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.855,ns_1@10.242.238.88:<0.31216.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_731_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.855,ns_1@10.242.238.88:<0.31208.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:50:03.856,ns_1@10.242.238.88:<0.30368.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_741_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.856,ns_1@10.242.238.88:<0.30360.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.856,ns_1@10.242.238.88:<0.30312.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_486_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.856,ns_1@10.242.238.88:<0.30304.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.857,ns_1@10.242.238.88:<0.30497.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_484_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:03.857,ns_1@10.242.238.88:<0.30481.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.857,ns_1@10.242.238.88:<0.31453.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_728_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:03.857,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 763. Nacking mccouch update. [views:debug,2014-08-19T16:50:03.857,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/763. Updated state: active (0) [rebalance:info,2014-08-19T16:50:03.857,ns_1@10.242.238.88:<0.31445.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:03.858,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",763,active,0} [ns_server:info,2014-08-19T16:50:03.858,ns_1@10.242.238.88:<0.30710.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_737_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.858,ns_1@10.242.238.88:<0.30702.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.858,ns_1@10.242.238.88:<0.31621.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_726_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.858,ns_1@10.242.238.88:<0.31613.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.859,ns_1@10.242.238.88:<0.30541.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_739_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.859,ns_1@10.242.238.88:<0.30533.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.860,ns_1@10.242.238.88:<0.31298.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_730_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:03.860,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: 
[933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428, 364,300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,763,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,450,322,995,867,684,556,190,918,790,424,296, 969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917, 789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320,993, 865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813, 758,630,136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706, 578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837,654, 526,160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602, 236,108,964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550, 184,912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132, 988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936, 808,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390, 262,1012] [rebalance:info,2014-08-19T16:50:03.860,ns_1@10.242.238.88:<0.31290.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.861,ns_1@10.242.238.88:<0.31095.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_732_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.861,ns_1@10.242.238.88:<0.31087.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.861,ns_1@10.242.238.88:<0.30633.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_738_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.861,ns_1@10.242.238.88:<0.30625.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.862,ns_1@10.242.238.88:<0.30927.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_734_'ns_1@10.242.238.90'">>] 
[rebalance:info,2014-08-19T16:50:03.862,ns_1@10.242.238.88:<0.30919.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.863,ns_1@10.242.238.88:<0.30787.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_736_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.863,ns_1@10.242.238.88:<0.30779.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.864,ns_1@10.242.238.88:<0.30454.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_740_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:50:03.864,ns_1@10.242.238.88:<0.30291.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_742_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:03.864,ns_1@10.242.238.88:<0.30283.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.864,ns_1@10.242.238.88:<0.30446.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.865,ns_1@10.242.238.88:<0.31586.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_981_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.865,ns_1@10.242.238.88:<0.31578.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.865,ns_1@10.242.238.88:<0.31277.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_985_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.865,ns_1@10.242.238.88:<0.31269.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.865,ns_1@10.242.238.88:<0.31432.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_983_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.865,ns_1@10.242.238.88:<0.31424.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.866,ns_1@10.242.238.88:<0.31060.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_987_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.866,ns_1@10.242.238.88:<0.31052.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.866,ns_1@10.242.238.88:<0.30752.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_991_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.867,ns_1@10.242.238.88:<0.30744.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.867,ns_1@10.242.238.88:<0.31195.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_986_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.867,ns_1@10.242.238.88:<0.31187.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:50:03.867,ns_1@10.242.238.88:<0.30598.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_993_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.867,ns_1@10.242.238.88:<0.30590.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.867,ns_1@10.242.238.88:<0.30906.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_989_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.867,ns_1@10.242.238.88:<0.30898.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.868,ns_1@10.242.238.88:<0.30983.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_988_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.868,ns_1@10.242.238.88:<0.30975.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.868,ns_1@10.242.238.88:<0.30270.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_997_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.869,ns_1@10.242.238.88:<0.30262.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.869,ns_1@10.242.238.88:<0.31355.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_984_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.870,ns_1@10.242.238.88:<0.31347.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.870,ns_1@10.242.238.88:<0.30433.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_995_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.870,ns_1@10.242.238.88:<0.30425.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.871,ns_1@10.242.238.88:<0.31509.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_982_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.871,ns_1@10.242.238.88:<0.31501.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.872,ns_1@10.242.238.88:<0.31669.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_980_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:03.872,ns_1@10.242.238.88:<0.30829.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_990_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.872,ns_1@10.242.238.88:<0.31655.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:03.872,ns_1@10.242.238.88:<0.30821.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.872,ns_1@10.242.238.88:<0.30520.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_994_'ns_1@10.242.238.91'">>] 
[rebalance:info,2014-08-19T16:50:03.872,ns_1@10.242.238.88:<0.30512.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.873,ns_1@10.242.238.88:<0.30347.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_996_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.873,ns_1@10.242.238.88:<0.30339.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:03.874,ns_1@10.242.238.88:<0.30675.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_992_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.874,ns_1@10.242.238.88:<0.30667.0>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:03.875,ns_1@10.242.238.88:<0.30227.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 487 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.341.1> [ns_server:info,2014-08-19T16:50:03.876,ns_1@10.242.238.88:<0.341.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 487 to state replica [ns_server:debug,2014-08-19T16:50:03.878,ns_1@10.242.238.88:<0.30185.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 998 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.355.1> [ns_server:info,2014-08-19T16:50:03.878,ns_1@10.242.238.88:<0.355.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 998 to state replica [ns_server:debug,2014-08-19T16:50:03.884,ns_1@10.242.238.88:<0.30086.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 489 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.366.1> [ns_server:info,2014-08-19T16:50:03.885,ns_1@10.242.238.88:<0.366.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 489 to state replica [ns_server:debug,2014-08-19T16:50:03.891,ns_1@10.242.238.88:<0.31557.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 471 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.392.1> [ns_server:info,2014-08-19T16:50:03.892,ns_1@10.242.238.88:<0.392.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 471 to state replica [ns_server:debug,2014-08-19T16:50:03.900,ns_1@10.242.238.88:<0.31389.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 473 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.399.1> [ns_server:info,2014-08-19T16:50:03.902,ns_1@10.242.238.88:<0.399.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 473 to state replica [ns_server:debug,2014-08-19T16:50:03.907,ns_1@10.242.238.88:<0.31031.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 477 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.411.1> [ns_server:info,2014-08-19T16:50:03.909,ns_1@10.242.238.88:<0.411.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 477 to state replica [ns_server:debug,2014-08-19T16:50:03.918,ns_1@10.242.238.88:<0.31234.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 475 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.412.1> [ns_server:info,2014-08-19T16:50:03.919,ns_1@10.242.238.88:<0.412.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 475 to state replica 
[ns_server:debug,2014-08-19T16:50:03.919,ns_1@10.242.238.88:<0.341.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_487 [ns_server:debug,2014-08-19T16:50:03.920,ns_1@10.242.238.88:<0.30877.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 479 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.413.1> [ns_server:debug,2014-08-19T16:50:03.921,ns_1@10.242.238.88:<0.31634.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 470 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.414.1> [ns_server:info,2014-08-19T16:50:03.921,ns_1@10.242.238.88:<0.413.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 479 to state replica [ns_server:debug,2014-08-19T16:50:03.925,ns_1@10.242.238.88:<0.30030.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 1000 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.415.1> [rebalance:info,2014-08-19T16:50:03.925,ns_1@10.242.238.88:<0.341.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[487]}, {checkpoints,[{487,1}]}, {name,<<"rebalance_487">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[487]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"487"}]} [ns_server:info,2014-08-19T16:50:03.925,ns_1@10.242.238.88:<0.414.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 470 to state replica [rebalance:debug,2014-08-19T16:50:03.926,ns_1@10.242.238.88:<0.341.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.439.1> [ns_server:info,2014-08-19T16:50:03.926,ns_1@10.242.238.88:<0.415.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 1000 to state replica [ns_server:debug,2014-08-19T16:50:03.933,ns_1@10.242.238.88:<0.355.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_998 [ns_server:debug,2014-08-19T16:50:03.934,ns_1@10.242.238.88:<0.31480.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 472 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.416.1> [ns_server:debug,2014-08-19T16:50:03.934,ns_1@10.242.238.88:<0.31311.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 474 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.417.1> [ns_server:debug,2014-08-19T16:50:03.935,ns_1@10.242.238.88:<0.30569.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 483 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.418.1> [ns_server:debug,2014-08-19T16:50:03.940,ns_1@10.242.238.88:<0.30723.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 481 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.419.1> [rebalance:info,2014-08-19T16:50:03.941,ns_1@10.242.238.88:<0.341.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [views:debug,2014-08-19T16:50:03.941,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/763. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:03.949,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",763,active,0} [ns_server:debug,2014-08-19T16:50:03.950,ns_1@10.242.238.88:<0.31522.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 727 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.421.1> [ns_server:debug,2014-08-19T16:50:03.950,ns_1@10.242.238.88:<0.31152.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 476 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.420.1> [ns_server:debug,2014-08-19T16:50:03.951,ns_1@10.242.238.88:<0.30400.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 485 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.422.1> [ns_server:debug,2014-08-19T16:50:03.956,ns_1@10.242.238.88:<0.30996.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 733 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.424.1> [ns_server:debug,2014-08-19T16:50:03.956,ns_1@10.242.238.88:<0.30842.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 735 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.425.1> [ns_server:debug,2014-08-19T16:50:03.956,ns_1@10.242.238.88:<0.30646.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 482 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.426.1> [ns_server:debug,2014-08-19T16:50:03.958,ns_1@10.242.238.88:<0.31368.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 729 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.423.1> [ns_server:debug,2014-08-19T16:50:03.959,ns_1@10.242.238.88:<0.366.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_489 [ns_server:info,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.417.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 474 to state replica [ns_server:debug,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.30940.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 478 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.428.1> [ns_server:info,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.416.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 472 to state replica [ns_server:debug,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.30800.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 480 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.427.1> [ns_server:info,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.418.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 483 to state replica [ns_server:info,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.419.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 481 to state replica [ns_server:debug,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.31208.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 731 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.429.1> [ns_server:debug,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.30481.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 484 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.431.1> [ns_server:debug,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.31445.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 728 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.433.1> 
[ns_server:debug,2014-08-19T16:50:03.962,ns_1@10.242.238.88:<0.30360.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 741 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.432.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.30702.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 737 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.434.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.30533.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 739 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.436.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.31290.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 730 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.437.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.31613.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 726 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.435.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.31087.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 732 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.438.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.30919.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 734 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.440.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.30625.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 738 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.441.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.30779.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 736 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.442.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.30283.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 742 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.443.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.30446.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 740 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.444.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.31578.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 981 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.445.1> [ns_server:debug,2014-08-19T16:50:03.963,ns_1@10.242.238.88:<0.31269.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 985 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.446.1> [ns_server:info,2014-08-19T16:50:03.966,ns_1@10.242.238.88:<0.422.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 485 to state replica [rebalance:info,2014-08-19T16:50:03.966,ns_1@10.242.238.88:<0.355.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[998]}, {checkpoints,[{998,1}]}, {name,<<"rebalance_998">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[998]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"998"}]} [ns_server:info,2014-08-19T16:50:03.966,ns_1@10.242.238.88:<0.420.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 476 to state replica 
[ns_server:debug,2014-08-19T16:50:03.967,ns_1@10.242.238.88:<0.30304.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 486 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.430.1> [ns_server:debug,2014-08-19T16:50:03.967,ns_1@10.242.238.88:<0.31424.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 983 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.447.1> [ns_server:debug,2014-08-19T16:50:03.967,ns_1@10.242.238.88:<0.31052.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 987 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.448.1> [ns_server:debug,2014-08-19T16:50:03.967,ns_1@10.242.238.88:<0.30590.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 993 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.449.1> [ns_server:debug,2014-08-19T16:50:03.967,ns_1@10.242.238.88:<0.30898.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 989 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.450.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.30744.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 991 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.452.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.30975.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 988 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.454.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.31347.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 984 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.453.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.31501.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 982 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.457.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.30512.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 994 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.460.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.30262.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 997 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.456.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.30821.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 990 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.458.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.30339.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 996 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.461.1> [ns_server:debug,2014-08-19T16:50:03.968,ns_1@10.242.238.88:<0.31655.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 980 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.459.1> [ns_server:debug,2014-08-19T16:50:03.969,ns_1@10.242.238.88:<0.30667.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 992 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.462.1> [ns_server:info,2014-08-19T16:50:03.969,ns_1@10.242.238.88:<0.421.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 727 to state replica [ns_server:info,2014-08-19T16:50:03.969,ns_1@10.242.238.88:<0.426.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 482 to state replica 
[ns_server:debug,2014-08-19T16:50:03.969,ns_1@10.242.238.88:<0.31187.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 986 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.451.1> [ns_server:info,2014-08-19T16:50:03.969,ns_1@10.242.238.88:<0.423.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 729 to state replica [ns_server:info,2014-08-19T16:50:03.969,ns_1@10.242.238.88:<0.424.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 733 to state replica [ns_server:info,2014-08-19T16:50:03.969,ns_1@10.242.238.88:<0.428.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 478 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.425.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 735 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.433.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 728 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.445.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 981 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.443.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 742 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.432.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 741 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.438.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 732 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.434.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 737 to state replica [rebalance:debug,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.355.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.463.1> [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.441.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 738 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.446.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 985 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.440.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 734 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.444.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 740 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.436.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 739 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.435.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 726 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.442.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 736 to state replica [ns_server:info,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.429.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 731 to state replica [ns_server:debug,2014-08-19T16:50:03.970,ns_1@10.242.238.88:<0.30425.0>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 995 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.455.1> [rebalance:info,2014-08-19T16:50:03.974,ns_1@10.242.238.88:<0.366.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[489]}, 
{checkpoints,[{489,1}]}, {name,<<"rebalance_489">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[489]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"489"}]} [rebalance:debug,2014-08-19T16:50:03.975,ns_1@10.242.238.88:<0.341.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:03.975,ns_1@10.242.238.88:<0.341.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:03.975,ns_1@10.242.238.88:<0.427.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 480 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.431.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 484 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.437.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 730 to state replica [rebalance:debug,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.366.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.464.1> [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.455.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 995 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.458.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 990 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.448.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 987 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.460.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 994 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.430.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 486 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.456.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 997 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.461.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 996 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.450.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 989 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.453.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 984 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.452.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 991 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.449.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 993 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.447.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 983 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.457.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 982 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.462.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 992 to state replica [ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.459.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 980 to state replica 
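
The "Starting tap stream" entries above print two Erlang proplists per takeover mover: the stream arguments (vbuckets, checkpoints, tap name, takeover flag) and the connection options. A plain Python mirror of what those fields appear to carry, for readability only; this is not an ns_server data structure:

# Illustrative mirror of the proplists logged for rebalance_489 above.
takeover_mover = {
    "source": ("10.242.238.88", 11209),
    "destination": ("10.242.238.89", 11209),
    "options": {
        "username": "default",
        "password": "get_from_config",   # logged as an atom in place of the real value
        "vbuckets": [489],
        "checkpoints": {489: 1},
        "tap_name": "rebalance_489",
        "set_to_pending_state": True,    # destination vbucket starts out pending
        "takeover": True,                # stream ends by transferring ownership
    },
}
print(takeover_mover["options"]["tap_name"])
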
[ns_server:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.454.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 988 to state replica [rebalance:info,2014-08-19T16:50:03.976,ns_1@10.242.238.88:<0.355.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:03.977,ns_1@10.242.238.88:<0.30227.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 487 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:03.977,ns_1@10.242.238.88:<0.451.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 986 to state replica [rebalance:info,2014-08-19T16:50:03.977,ns_1@10.242.238.88:<0.366.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:03.979,ns_1@10.242.238.88:<0.355.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:debug,2014-08-19T16:50:03.979,ns_1@10.242.238.88:<0.30235.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:03.979,ns_1@10.242.238.88:<0.355.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:50:03.980,ns_1@10.242.238.88:<0.366.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:03.980,ns_1@10.242.238.88:<0.366.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:03.981,ns_1@10.242.238.88:<0.30185.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 998 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:03.981,ns_1@10.242.238.88:<0.392.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_471 [rebalance:info,2014-08-19T16:50:03.984,ns_1@10.242.238.88:<0.30086.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 489 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:03.984,ns_1@10.242.238.88:<0.30193.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:03.985,ns_1@10.242.238.88:<0.30235.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_487_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.985,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 487 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:03.985,ns_1@10.242.238.88:<0.468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 487 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:50:03.985,ns_1@10.242.238.88:<0.392.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[471]}, {checkpoints,[{471,1}]}, {name,<<"rebalance_471">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[471]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"471"}]} [rebalance:debug,2014-08-19T16:50:03.986,ns_1@10.242.238.88:<0.392.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.469.1> [rebalance:debug,2014-08-19T16:50:03.986,ns_1@10.242.238.88:<0.30094.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
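
Taken together, the entries above trace one vbucket move end to end: the takeover tap stream finishes ("Skipping close ack for successfull takover"), janitor_agent marks the new master active, the new replica is chained to it via a bulk state change, and the temporary replication_building taps are killed before the move is reported done. A purely illustrative model of that ordering for vbucket 487 (step names are descriptive, not ns_server function names, and each step is assumed to start only after the previous one completes):

# Toy model of the per-vbucket move ordering visible in the log above.
def move_vbucket(vb, old_master, new_master, new_replica):
    steps = [
        f"takeover tap stream rebalance_{vb}: {old_master} -> {new_master}",
        f"set vbucket {vb} active on {new_master}",
        f"set vbucket {vb} replica on {new_replica} (chained to {new_master})",
        f"kill replication_building_{vb} taps on {old_master}",
        f"move done; schedule deletion of {vb} on {old_master}",
    ]
    for step in steps:
        yield step

for s in move_vbucket(487, "ns_1@10.242.238.88",
                      "ns_1@10.242.238.89", "ns_1@10.242.238.91"):
    print(s)
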
[rebalance:info,2014-08-19T16:50:03.987,ns_1@10.242.238.88:<0.392.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:03.987,ns_1@10.242.238.88:<0.30193.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_998_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:50:03.988,ns_1@10.242.238.88:<0.392.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:03.989,ns_1@10.242.238.88:<0.392.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:03.989,ns_1@10.242.238.88:<0.30094.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_489_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.990,ns_1@10.242.238.88:<0.31557.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 471 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:03.991,ns_1@10.242.238.88:<0.31565.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:03.993,ns_1@10.242.238.88:<0.412.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_475 [ns_server:info,2014-08-19T16:50:03.994,ns_1@10.242.238.88:<0.31565.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_471_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:03.994,ns_1@10.242.238.88:<0.412.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[475]}, {checkpoints,[{475,1}]}, {name,<<"rebalance_475">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[475]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"475"}]} [rebalance:debug,2014-08-19T16:50:03.995,ns_1@10.242.238.88:<0.412.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.476.1> [ns_server:debug,2014-08-19T16:50:03.995,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.996,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.997,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:50:03.997,ns_1@10.242.238.88:<0.412.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:03.997,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:03.997,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{487, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:03.998,ns_1@10.242.238.88:<0.412.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:03.998,ns_1@10.242.238.88:<0.412.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:03.999,ns_1@10.242.238.88:<0.31234.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 475 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.000,ns_1@10.242.238.88:<0.415.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_1000 [rebalance:debug,2014-08-19T16:50:04.001,ns_1@10.242.238.88:<0.31242.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.002,ns_1@10.242.238.88:<0.415.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[1000]}, {checkpoints,[{1000,1}]}, {name,<<"rebalance_1000">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[1000]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"1000"}]} [rebalance:debug,2014-08-19T16:50:04.002,ns_1@10.242.238.88:<0.415.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.488.1> [rebalance:info,2014-08-19T16:50:04.003,ns_1@10.242.238.88:<0.415.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:04.004,ns_1@10.242.238.88:<0.31242.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_475_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:50:04.005,ns_1@10.242.238.88:<0.415.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.005,ns_1@10.242.238.88:<0.415.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.006,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 487 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:04.006,ns_1@10.242.238.88:<0.30030.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 1000 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.006,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 487) [ns_server:debug,2014-08-19T16:50:04.007,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.007,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 998 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.007,ns_1@10.242.238.88:<0.504.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 998 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:04.008,ns_1@10.242.238.88:<0.30038.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.012,ns_1@10.242.238.88:<0.30038.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_1000_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.016,ns_1@10.242.238.88:<0.414.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_470 [rebalance:info,2014-08-19T16:50:04.017,ns_1@10.242.238.88:<0.414.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[470]}, {checkpoints,[{470,1}]}, {name,<<"rebalance_470">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[470]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"470"}]} [rebalance:debug,2014-08-19T16:50:04.018,ns_1@10.242.238.88:<0.414.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.507.1> [rebalance:info,2014-08-19T16:50:04.019,ns_1@10.242.238.88:<0.414.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.020,ns_1@10.242.238.88:<0.414.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.020,ns_1@10.242.238.88:<0.414.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.021,ns_1@10.242.238.88:<0.31634.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 470 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.023,ns_1@10.242.238.88:<0.31642.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.027,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.027,ns_1@10.242.238.88:<0.31642.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_470_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.028,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{998, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, 
{flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.028,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.029,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.029,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.034,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 998 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.035,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 998) [ns_server:debug,2014-08-19T16:50:04.037,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.037,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 489 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.037,ns_1@10.242.238.88:<0.520.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 489 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:04.038,ns_1@10.242.238.88:<0.413.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_479 [rebalance:info,2014-08-19T16:50:04.040,ns_1@10.242.238.88:<0.413.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[479]}, {checkpoints,[{479,1}]}, {name,<<"rebalance_479">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[479]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"479"}]} [rebalance:debug,2014-08-19T16:50:04.041,ns_1@10.242.238.88:<0.413.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.521.1> [ns_server:debug,2014-08-19T16:50:04.041,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 761. Nacking mccouch update. [views:debug,2014-08-19T16:50:04.041,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/761. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.042,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",761,active,0} [rebalance:info,2014-08-19T16:50:04.042,ns_1@10.242.238.88:<0.413.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.044,ns_1@10.242.238.88:<0.413.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.044,ns_1@10.242.238.88:<0.413.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:04.043,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,935,871,807,752,688,624,560,194,130,986,922,858,794,492,428, 364,300,973,909,845,781,726,662,598,534,232,168,960,896,832,768,466,402,338, 274,947,883,819,764,700,636,572,206,142,1011,998,934,870,806,504,440,376,312, 985,921,857,793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,763,452,388,324,260, 1010,997,869,686,558,192,920,792,426,298,971,843,660,532,166,894,400,272, 1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,424, 296,969,841,658,530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372, 917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,448,320, 993,865,682,554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941, 813,758,630,136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889, 706,578,212,1017,940,812,446,318,991,863,680,552,186,914,786,420,292,965,837, 654,526,160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730, 602,236,108,964,836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678, 550,184,912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626, 132,988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013, 936,808,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884, 390,262,1012] [rebalance:info,2014-08-19T16:50:04.045,ns_1@10.242.238.88:<0.30877.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 479 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.046,ns_1@10.242.238.88:<0.30885.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
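
The large "Usable vbuckets:" dump above is an unordered list of vbucket ids tracked by capi_set_view_manager. A small sketch (hypothetical helper) that parses such a dump into a set so counts and membership checks are easy; it assumes the bracketed, comma-separated integer format shown:

# Hypothetical helper: parse a "Usable vbuckets:" dump into a set of ints.
import re

def parse_usable_vbuckets(log_text):
    m = re.search(r"Usable vbuckets:\s*\[([\d,\s]+)\]", log_text)
    if not m:
        return set()
    return {int(x) for x in m.group(1).split(",") if x.strip()}

vbs = parse_usable_vbuckets("Usable vbuckets: [933,805,750]")
print(len(vbs), 761 in vbs)   # e.g. check whether vbucket 761 is tracked
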
[ns_server:debug,2014-08-19T16:50:04.047,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.048,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{489, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.048,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.049,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.052,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.052,ns_1@10.242.238.88:<0.399.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_473 [rebalance:info,2014-08-19T16:50:04.053,ns_1@10.242.238.88:<0.399.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[473]}, {checkpoints,[{473,1}]}, {name,<<"rebalance_473">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[473]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"473"}]} [ns_server:info,2014-08-19T16:50:04.054,ns_1@10.242.238.88:<0.30885.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_479_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:50:04.054,ns_1@10.242.238.88:<0.399.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.532.1> [rebalance:info,2014-08-19T16:50:04.056,ns_1@10.242.238.88:<0.399.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.059,ns_1@10.242.238.88:<0.399.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.059,ns_1@10.242.238.88:<0.399.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.060,ns_1@10.242.238.88:<0.31389.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 473 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.062,ns_1@10.242.238.88:<0.31397.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.063,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 489 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.064,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 489) [ns_server:debug,2014-08-19T16:50:04.064,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.064,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 471 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.065,ns_1@10.242.238.88:<0.535.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 471 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:50:04.066,ns_1@10.242.238.88:<0.31397.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_473_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.069,ns_1@10.242.238.88:<0.411.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_477 [rebalance:info,2014-08-19T16:50:04.070,ns_1@10.242.238.88:<0.411.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[477]}, {checkpoints,[{477,1}]}, {name,<<"rebalance_477">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[477]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"477"}]} [rebalance:debug,2014-08-19T16:50:04.071,ns_1@10.242.238.88:<0.411.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.538.1> [rebalance:info,2014-08-19T16:50:04.071,ns_1@10.242.238.88:<0.411.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.073,ns_1@10.242.238.88:<0.411.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.073,ns_1@10.242.238.88:<0.411.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.074,ns_1@10.242.238.88:<0.31031.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 477 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [views:debug,2014-08-19T16:50:04.075,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/761. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.075,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.075,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",761,active,0} [rebalance:debug,2014-08-19T16:50:04.076,ns_1@10.242.238.88:<0.31039.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.077,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{471, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.078,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:info,2014-08-19T16:50:04.080,ns_1@10.242.238.88:<0.31039.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_477_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.080,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.081,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.085,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 471 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.085,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 471) [ns_server:debug,2014-08-19T16:50:04.086,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.086,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 475 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.086,ns_1@10.242.238.88:<0.551.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 475 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:04.090,ns_1@10.242.238.88:<0.455.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_995 [rebalance:info,2014-08-19T16:50:04.091,ns_1@10.242.238.88:<0.455.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[995]}, {checkpoints,[{995,1}]}, {name,<<"rebalance_995">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[995]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"995"}]} [rebalance:debug,2014-08-19T16:50:04.092,ns_1@10.242.238.88:<0.455.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.552.1> [rebalance:info,2014-08-19T16:50:04.093,ns_1@10.242.238.88:<0.455.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.094,ns_1@10.242.238.88:<0.455.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.094,ns_1@10.242.238.88:<0.455.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:04.096,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.096,ns_1@10.242.238.88:<0.30425.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 995 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.096,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.097,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
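
The "Will delete it on" lines above follow directly from the old and new replication chains in the bucket-map update: a node that held the vbucket in the old chain but is absent from the new chain has its copy scheduled for deletion. A toy illustration (not ns_server code), using vbucket 471 from the config change above, where undefined chain slots are modeled as None:

# Toy illustration: nodes to delete a vbucket from are those in the old
# chain but not in the new chain (undefined entries ignored).
def deletion_targets(old_chain, new_chain):
    old = {n for n in old_chain if n is not None}
    new = {n for n in new_chain if n is not None}
    return sorted(old - new)

old_chain = ["ns_1@10.242.238.88", None]
new_chain = ["ns_1@10.242.238.89", "ns_1@10.242.238.91"]
print(deletion_targets(old_chain, new_chain))   # ['ns_1@10.242.238.88']
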
[ns_server:debug,2014-08-19T16:50:04.097,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{475, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.097,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.098,ns_1@10.242.238.88:<0.30433.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.101,ns_1@10.242.238.88:<0.30433.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_995_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.104,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 475 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.104,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 475) [ns_server:debug,2014-08-19T16:50:04.105,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.105,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 1000 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.105,ns_1@10.242.238.88:<0.564.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 1000 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.107,ns_1@10.242.238.88:<0.448.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_987 [rebalance:info,2014-08-19T16:50:04.108,ns_1@10.242.238.88:<0.448.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[987]}, {checkpoints,[{987,1}]}, {name,<<"rebalance_987">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[987]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"987"}]} [rebalance:debug,2014-08-19T16:50:04.109,ns_1@10.242.238.88:<0.448.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.565.1> [rebalance:info,2014-08-19T16:50:04.110,ns_1@10.242.238.88:<0.448.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.112,ns_1@10.242.238.88:<0.448.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.112,ns_1@10.242.238.88:<0.448.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.113,ns_1@10.242.238.88:<0.31052.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 987 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.115,ns_1@10.242.238.88:<0.31060.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:info,2014-08-19T16:50:04.118,ns_1@10.242.238.88:<0.31060.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_987_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.121,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.121,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.121,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.122,ns_1@10.242.238.88:<0.444.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_740 [ns_server:debug,2014-08-19T16:50:04.122,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.122,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{1000, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:04.123,ns_1@10.242.238.88:<0.444.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[740]}, {checkpoints,[{740,1}]}, {name,<<"rebalance_740">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[740]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"740"}]} [rebalance:debug,2014-08-19T16:50:04.125,ns_1@10.242.238.88:<0.444.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.576.1> [rebalance:info,2014-08-19T16:50:04.126,ns_1@10.242.238.88:<0.444.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.128,ns_1@10.242.238.88:<0.444.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.128,ns_1@10.242.238.88:<0.444.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.129,ns_1@10.242.238.88:<0.30446.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 740 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:04.129,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 1000 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.130,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 1000) [rebalance:debug,2014-08-19T16:50:04.131,ns_1@10.242.238.88:<0.30454.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.131,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.131,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 470 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.131,ns_1@10.242.238.88:<0.579.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 470 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:50:04.134,ns_1@10.242.238.88:<0.30454.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_740_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.141,ns_1@10.242.238.88:<0.432.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_741 [ns_server:debug,2014-08-19T16:50:04.142,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.142,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:04.142,ns_1@10.242.238.88:<0.432.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[741]}, {checkpoints,[{741,1}]}, {name,<<"rebalance_741">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[741]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"741"}]} [ns_server:debug,2014-08-19T16:50:04.143,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{470, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.143,ns_1@10.242.238.88:<0.432.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.584.1> [ns_server:debug,2014-08-19T16:50:04.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.145,ns_1@10.242.238.88:<0.432.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.147,ns_1@10.242.238.88:<0.432.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.147,ns_1@10.242.238.88:<0.432.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:50:04.148,ns_1@10.242.238.88:<0.30360.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 741 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.150,ns_1@10.242.238.88:<0.30368.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.154,ns_1@10.242.238.88:<0.30368.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_741_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.154,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 470 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.154,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 470) [ns_server:debug,2014-08-19T16:50:04.155,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.155,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 479 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.155,ns_1@10.242.238.88:<0.609.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 479 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:04.158,ns_1@10.242.238.88:<0.445.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_981 [rebalance:info,2014-08-19T16:50:04.160,ns_1@10.242.238.88:<0.445.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[981]}, {checkpoints,[{981,1}]}, {name,<<"rebalance_981">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[981]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"981"}]} [rebalance:debug,2014-08-19T16:50:04.160,ns_1@10.242.238.88:<0.445.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.610.1> [rebalance:info,2014-08-19T16:50:04.161,ns_1@10.242.238.88:<0.445.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.162,ns_1@10.242.238.88:<0.445.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.163,ns_1@10.242.238.88:<0.445.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.163,ns_1@10.242.238.88:<0.31578.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 981 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.165,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.165,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
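
Each rebalance_N tap above goes through the same short lifecycle: any stale tap with that name is killed, the takeover stream starts, an upstream_sender pid is noted, no backfill is needed, and the mover terminates normally. A sketch (hypothetical analysis helper) that estimates how long each takeover lived by correlating the "killing tap named" entry with the later "Dying with reason: normal" entry for the same Erlang pid; the timestamp and pid formats are assumed from this capture, and the kill_tapname timestamp is only a proxy for the stream's start:

# Hypothetical helper: per-tap takeover duration from this log format.
import re
from datetime import datetime

TS = r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)"
KILL_RE = re.compile(TS + r",[^<]*(<[^>]+>):ebucketmigrator_srv:kill_tapname:\d+\]"
                          r"killing tap named: (rebalance_\d+)")
DONE_RE = re.compile(TS + r",[^<]*(<[^>]+>):ebucketmigrator_srv:terminate:\d+\]"
                          r"Dying with reason: normal")

def parse_ts(s):
    return datetime.strptime(s, "%Y-%m-%dT%H:%M:%S.%f")

def takeover_durations(log_text):
    started = {}   # pid -> (tap name, time the old tap was killed)
    for ts, pid, name in KILL_RE.findall(log_text):
        started.setdefault(pid, (name, parse_ts(ts)))
    durations = {}
    for ts, pid in DONE_RE.findall(log_text):
        if pid in started:
            name, t0 = started[pid]
            durations[name] = (parse_ts(ts) - t0).total_seconds()
    return durations

if __name__ == "__main__":
    sample = ("[ns_server:debug,2014-08-19T16:50:04.158,ns_1@10.242.238.88:"
              "<0.445.1>:ebucketmigrator_srv:kill_tapname:1090]"
              "killing tap named: rebalance_981 "
              "[rebalance:debug,2014-08-19T16:50:04.162,ns_1@10.242.238.88:"
              "<0.445.1>:ebucketmigrator_srv:terminate:737]"
              "Dying with reason: normal")
    print(takeover_durations(sample))   # {'rebalance_981': 0.004}
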
[rebalance:debug,2014-08-19T16:50:04.166,ns_1@10.242.238.88:<0.31586.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.166,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.166,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{479, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.166,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.169,ns_1@10.242.238.88:<0.31586.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_981_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.173,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 479 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.173,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 479) [ns_server:debug,2014-08-19T16:50:04.174,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.174,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 473 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.174,ns_1@10.242.238.88:<0.622.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 473 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:04.176,ns_1@10.242.238.88:<0.462.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_992 [rebalance:info,2014-08-19T16:50:04.177,ns_1@10.242.238.88:<0.462.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[992]}, {checkpoints,[{992,1}]}, {name,<<"rebalance_992">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[992]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"992"}]} [rebalance:debug,2014-08-19T16:50:04.178,ns_1@10.242.238.88:<0.462.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.623.1> [rebalance:info,2014-08-19T16:50:04.180,ns_1@10.242.238.88:<0.462.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.181,ns_1@10.242.238.88:<0.462.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.181,ns_1@10.242.238.88:<0.462.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.183,ns_1@10.242.238.88:<0.30667.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 992 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
[ns_server:debug,2014-08-19T16:50:04.188,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.188,ns_1@10.242.238.88:<0.30675.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.189,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.189,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{473, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.189,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.189,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.190,ns_1@10.242.238.88:<0.459.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_980 [ns_server:debug,2014-08-19T16:50:04.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 759. Nacking mccouch update. [ns_server:info,2014-08-19T16:50:04.192,ns_1@10.242.238.88:<0.30675.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_992_'ns_1@10.242.238.90'">>] [views:debug,2014-08-19T16:50:04.192,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/759. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",759,active,0} [rebalance:info,2014-08-19T16:50:04.193,ns_1@10.242.238.88:<0.459.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[980]}, {checkpoints,[{980,1}]}, {name,<<"rebalance_980">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[980]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"980"}]} [rebalance:debug,2014-08-19T16:50:04.194,ns_1@10.242.238.88:<0.459.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.634.1> [ns_server:debug,2014-08-19T16:50:04.194,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,871,688,560,194,986,922,858,794,492,428,364,300,973,909,845, 781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764, 700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,793,738, 674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648, 584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558, 192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140, 996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021, 944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394, 266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314,987, 859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012,935,807, 752,624,130] [rebalance:info,2014-08-19T16:50:04.195,ns_1@10.242.238.88:<0.459.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.197,ns_1@10.242.238.88:<0.459.1>:ebucketmigrator_srv:terminate:737]Dying 
with reason: normal [rebalance:info,2014-08-19T16:50:04.197,ns_1@10.242.238.88:<0.459.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.198,ns_1@10.242.238.88:<0.31655.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 980 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:04.198,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 473 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.199,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 473) [ns_server:debug,2014-08-19T16:50:04.200,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.200,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 477 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.200,ns_1@10.242.238.88:<0.637.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 477 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:04.201,ns_1@10.242.238.88:<0.31669.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.204,ns_1@10.242.238.88:<0.31669.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_980_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.206,ns_1@10.242.238.88:<0.454.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_988 [rebalance:info,2014-08-19T16:50:04.207,ns_1@10.242.238.88:<0.454.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[988]}, {checkpoints,[{988,1}]}, {name,<<"rebalance_988">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[988]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"988"}]} [rebalance:debug,2014-08-19T16:50:04.208,ns_1@10.242.238.88:<0.454.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.640.1> [rebalance:info,2014-08-19T16:50:04.209,ns_1@10.242.238.88:<0.454.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.211,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.211,ns_1@10.242.238.88:<0.454.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.211,ns_1@10.242.238.88:<0.454.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:04.212,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.212,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:50:04.212,ns_1@10.242.238.88:<0.30975.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 988 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.212,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{477, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.213,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.214,ns_1@10.242.238.88:<0.30983.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.217,ns_1@10.242.238.88:<0.30983.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_988_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.218,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 477 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.219,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 477) [ns_server:debug,2014-08-19T16:50:04.220,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.220,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 995 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.220,ns_1@10.242.238.88:<0.653.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 995 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.223,ns_1@10.242.238.88:<0.447.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_983 [rebalance:info,2014-08-19T16:50:04.224,ns_1@10.242.238.88:<0.447.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[983]}, {checkpoints,[{983,1}]}, {name,<<"rebalance_983">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[983]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"983"}]} [rebalance:debug,2014-08-19T16:50:04.225,ns_1@10.242.238.88:<0.447.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.654.1> [rebalance:info,2014-08-19T16:50:04.226,ns_1@10.242.238.88:<0.447.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.227,ns_1@10.242.238.88:<0.447.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.227,ns_1@10.242.238.88:<0.447.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.228,ns_1@10.242.238.88:<0.31424.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 983 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:50:04.230,ns_1@10.242.238.88:<0.31432.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.234,ns_1@10.242.238.88:<0.31432.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_983_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.237,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.238,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.238,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{995, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.239,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.239,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.240,ns_1@10.242.238.88:<0.457.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_982 [rebalance:info,2014-08-19T16:50:04.242,ns_1@10.242.238.88:<0.457.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[982]}, {checkpoints,[{982,1}]}, {name,<<"rebalance_982">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[982]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"982"}]} [rebalance:debug,2014-08-19T16:50:04.243,ns_1@10.242.238.88:<0.457.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.664.1> [views:debug,2014-08-19T16:50:04.243,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/759. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.243,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",759,active,0} [rebalance:info,2014-08-19T16:50:04.244,ns_1@10.242.238.88:<0.457.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:04.245,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 995 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.246,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 995) [ns_server:debug,2014-08-19T16:50:04.247,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.247,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 987 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:debug,2014-08-19T16:50:04.247,ns_1@10.242.238.88:<0.457.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.247,ns_1@10.242.238.88:<0.668.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 987 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:04.247,ns_1@10.242.238.88:<0.457.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.248,ns_1@10.242.238.88:<0.31501.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 982 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.250,ns_1@10.242.238.88:<0.31509.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.256,ns_1@10.242.238.88:<0.31509.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_982_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.259,ns_1@10.242.238.88:<0.452.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_991 [rebalance:info,2014-08-19T16:50:04.263,ns_1@10.242.238.88:<0.452.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[991]}, {checkpoints,[{991,1}]}, {name,<<"rebalance_991">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[991]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"991"}]} [rebalance:debug,2014-08-19T16:50:04.264,ns_1@10.242.238.88:<0.452.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.671.1> [rebalance:info,2014-08-19T16:50:04.265,ns_1@10.242.238.88:<0.452.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.266,ns_1@10.242.238.88:<0.452.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.266,ns_1@10.242.238.88:<0.452.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:04.267,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.267,ns_1@10.242.238.88:<0.30744.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 991 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.268,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.268,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.268,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{987, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.269,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.269,ns_1@10.242.238.88:<0.30752.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.273,ns_1@10.242.238.88:<0.30752.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_991_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.275,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 987 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.275,ns_1@10.242.238.88:<0.453.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_984 [ns_server:debug,2014-08-19T16:50:04.275,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 987) [ns_server:debug,2014-08-19T16:50:04.276,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.276,ns_1@10.242.238.88:<0.453.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[984]}, {checkpoints,[{984,1}]}, {name,<<"rebalance_984">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[984]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"984"}]} [rebalance:info,2014-08-19T16:50:04.276,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 740 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.277,ns_1@10.242.238.88:<0.683.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 740 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:04.277,ns_1@10.242.238.88:<0.453.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.684.1> [rebalance:info,2014-08-19T16:50:04.278,ns_1@10.242.238.88:<0.453.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.279,ns_1@10.242.238.88:<0.453.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.279,ns_1@10.242.238.88:<0.453.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.280,ns_1@10.242.238.88:<0.31347.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 984 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
[ns_server:debug,2014-08-19T16:50:04.288,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.288,ns_1@10.242.238.88:<0.31355.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.289,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.289,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.289,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.289,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{740, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.290,ns_1@10.242.238.88:<0.450.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_989 [ns_server:info,2014-08-19T16:50:04.291,ns_1@10.242.238.88:<0.31355.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_984_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.292,ns_1@10.242.238.88:<0.450.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[989]}, {checkpoints,[{989,1}]}, {name,<<"rebalance_989">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[989]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"989"}]} [rebalance:debug,2014-08-19T16:50:04.293,ns_1@10.242.238.88:<0.450.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.695.1> [rebalance:info,2014-08-19T16:50:04.297,ns_1@10.242.238.88:<0.450.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.299,ns_1@10.242.238.88:<0.450.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.299,ns_1@10.242.238.88:<0.450.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.300,ns_1@10.242.238.88:<0.30898.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 989 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:04.300,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 740 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.301,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 740) [ns_server:debug,2014-08-19T16:50:04.301,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:50:04.301,ns_1@10.242.238.88:<0.30906.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.302,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 741 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.302,ns_1@10.242.238.88:<0.698.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 741 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:04.304,ns_1@10.242.238.88:<0.30906.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_989_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.311,ns_1@10.242.238.88:<0.449.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_993 [ns_server:debug,2014-08-19T16:50:04.312,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.313,ns_1@10.242.238.88:<0.449.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[993]}, {checkpoints,[{993,1}]}, {name,<<"rebalance_993">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[993]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"993"}]} [ns_server:debug,2014-08-19T16:50:04.313,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.313,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{741, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.314,ns_1@10.242.238.88:<0.449.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.704.1> [ns_server:debug,2014-08-19T16:50:04.314,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.314,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.314,ns_1@10.242.238.88:<0.449.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.316,ns_1@10.242.238.88:<0.449.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.316,ns_1@10.242.238.88:<0.449.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.317,ns_1@10.242.238.88:<0.30590.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 993 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.319,ns_1@10.242.238.88:<0.30598.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.320,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 741 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.321,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 741) [ns_server:debug,2014-08-19T16:50:04.321,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.322,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 981 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [ns_server:info,2014-08-19T16:50:04.322,ns_1@10.242.238.88:<0.30598.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_993_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.322,ns_1@10.242.238.88:<0.714.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 981 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.325,ns_1@10.242.238.88:<0.456.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_997 [rebalance:info,2014-08-19T16:50:04.327,ns_1@10.242.238.88:<0.456.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[997]}, {checkpoints,[{997,1}]}, {name,<<"rebalance_997">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[997]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"997"}]} [rebalance:debug,2014-08-19T16:50:04.328,ns_1@10.242.238.88:<0.456.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.715.1> [rebalance:info,2014-08-19T16:50:04.328,ns_1@10.242.238.88:<0.456.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.330,ns_1@10.242.238.88:<0.456.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.331,ns_1@10.242.238.88:<0.456.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.331,ns_1@10.242.238.88:<0.30262.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 997 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.333,ns_1@10.242.238.88:<0.30270.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.335,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.335,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.336,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.336,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{981, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.336,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.337,ns_1@10.242.238.88:<0.30270.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_997_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.342,ns_1@10.242.238.88:<0.461.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_996 [rebalance:info,2014-08-19T16:50:04.344,ns_1@10.242.238.88:<0.461.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[996]}, {checkpoints,[{996,1}]}, {name,<<"rebalance_996">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[996]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"996"}]} [rebalance:debug,2014-08-19T16:50:04.345,ns_1@10.242.238.88:<0.461.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.739.1> [rebalance:info,2014-08-19T16:50:04.346,ns_1@10.242.238.88:<0.461.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:04.346,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 981 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.347,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 981) [ns_server:debug,2014-08-19T16:50:04.347,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.347,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 992 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:debug,2014-08-19T16:50:04.348,ns_1@10.242.238.88:<0.461.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.348,ns_1@10.242.238.88:<0.743.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 992 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:04.348,ns_1@10.242.238.88:<0.461.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.349,ns_1@10.242.238.88:<0.30339.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 996 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.350,ns_1@10.242.238.88:<0.30347.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.353,ns_1@10.242.238.88:<0.30347.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_996_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.358,ns_1@10.242.238.88:<0.460.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_994 [rebalance:info,2014-08-19T16:50:04.360,ns_1@10.242.238.88:<0.460.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[994]}, {checkpoints,[{994,1}]}, {name,<<"rebalance_994">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[994]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"994"}]} [rebalance:debug,2014-08-19T16:50:04.361,ns_1@10.242.238.88:<0.460.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.746.1> [ns_server:debug,2014-08-19T16:50:04.361,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.361,ns_1@10.242.238.88:<0.460.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.362,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.362,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.362,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{992, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.362,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.363,ns_1@10.242.238.88:<0.460.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.363,ns_1@10.242.238.88:<0.460.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.366,ns_1@10.242.238.88:<0.30512.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 994 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:04.369,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 992 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.369,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 992) [rebalance:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.88:<0.30520.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.370,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.370,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 980 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.370,ns_1@10.242.238.88:<0.756.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 980 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:50:04.373,ns_1@10.242.238.88:<0.30520.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_994_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:04.376,ns_1@10.242.238.88:<0.458.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_990 [rebalance:info,2014-08-19T16:50:04.377,ns_1@10.242.238.88:<0.458.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[990]}, {checkpoints,[{990,1}]}, {name,<<"rebalance_990">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[990]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"990"}]} [rebalance:debug,2014-08-19T16:50:04.379,ns_1@10.242.238.88:<0.458.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.760.1> [rebalance:info,2014-08-19T16:50:04.380,ns_1@10.242.238.88:<0.458.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:50:04.381,ns_1@10.242.238.88:<0.458.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.382,ns_1@10.242.238.88:<0.458.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.382,ns_1@10.242.238.88:<0.30821.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 990 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.383,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.383,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.383,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.384,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.384,ns_1@10.242.238.88:<0.30829.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.384,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{980, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.389,ns_1@10.242.238.88:<0.30829.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_990_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.389,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 980 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.390,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 980) [ns_server:debug,2014-08-19T16:50:04.391,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.391,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 988 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.391,ns_1@10.242.238.88:<0.772.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 988 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.398,ns_1@10.242.238.88:<0.430.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_486 [rebalance:info,2014-08-19T16:50:04.399,ns_1@10.242.238.88:<0.430.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[486]}, {checkpoints,[{486,1}]}, {name,<<"rebalance_486">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[486]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"486"}]} [rebalance:debug,2014-08-19T16:50:04.400,ns_1@10.242.238.88:<0.430.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.773.1> [rebalance:info,2014-08-19T16:50:04.401,ns_1@10.242.238.88:<0.430.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.403,ns_1@10.242.238.88:<0.430.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.403,ns_1@10.242.238.88:<0.430.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.404,ns_1@10.242.238.88:<0.30304.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 486 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.405,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.405,ns_1@10.242.238.88:<0.30312.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.406,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.406,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{988, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.406,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.407,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.409,ns_1@10.242.238.88:<0.30312.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_486_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.413,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 988 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.414,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 988) [ns_server:debug,2014-08-19T16:50:04.415,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:04.415,ns_1@10.242.238.88:<0.442.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_736 [rebalance:info,2014-08-19T16:50:04.415,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 983 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.415,ns_1@10.242.238.88:<0.786.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 983 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:04.416,ns_1@10.242.238.88:<0.442.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[736]}, {checkpoints,[{736,1}]}, {name,<<"rebalance_736">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[736]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"736"}]} [rebalance:debug,2014-08-19T16:50:04.416,ns_1@10.242.238.88:<0.442.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.787.1> [rebalance:info,2014-08-19T16:50:04.417,ns_1@10.242.238.88:<0.442.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 757. Nacking mccouch update. [views:debug,2014-08-19T16:50:04.418,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/757. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",757,active,0} [rebalance:debug,2014-08-19T16:50:04.419,ns_1@10.242.238.88:<0.442.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.419,ns_1@10.242.238.88:<0.442.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.420,ns_1@10.242.238.88:<0.30779.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 736 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.421,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,871,688,560,194,986,922,858,794,492,428,364,300,973,909,845, 781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764, 700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,793,738, 674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648, 584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558, 192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140, 996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021, 944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442,314, 987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012,935, 807,752,624,130] [ns_server:debug,2014-08-19T16:50:04.424,ns_1@10.242.238.88:<0.423.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_729 [rebalance:info,2014-08-19T16:50:04.425,ns_1@10.242.238.88:<0.423.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[729]}, {checkpoints,[{729,1}]}, 
{name,<<"rebalance_729">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[729]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"729"}]} [rebalance:debug,2014-08-19T16:50:04.426,ns_1@10.242.238.88:<0.423.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.788.1> [rebalance:info,2014-08-19T16:50:04.427,ns_1@10.242.238.88:<0.423.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.427,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.428,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:debug,2014-08-19T16:50:04.428,ns_1@10.242.238.88:<0.30787.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.428,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.428,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.429,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{983, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.430,ns_1@10.242.238.88:<0.423.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.430,ns_1@10.242.238.88:<0.423.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.431,ns_1@10.242.238.88:<0.31368.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 729 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:04.432,ns_1@10.242.238.88:<0.30787.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_736_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:50:04.433,ns_1@10.242.238.88:<0.31376.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.436,ns_1@10.242.238.88:<0.31376.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_729_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.438,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 983 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.439,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 983) [ns_server:debug,2014-08-19T16:50:04.440,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.440,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 982 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.440,ns_1@10.242.238.88:<0.803.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 982 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.446,ns_1@10.242.238.88:<0.437.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_730 [rebalance:info,2014-08-19T16:50:04.448,ns_1@10.242.238.88:<0.437.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[730]}, {checkpoints,[{730,1}]}, {name,<<"rebalance_730">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[730]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"730"}]} [rebalance:debug,2014-08-19T16:50:04.448,ns_1@10.242.238.88:<0.437.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.804.1> [rebalance:info,2014-08-19T16:50:04.449,ns_1@10.242.238.88:<0.437.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.451,ns_1@10.242.238.88:<0.437.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.451,ns_1@10.242.238.88:<0.437.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.452,ns_1@10.242.238.88:<0.31290.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 730 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.453,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.453,ns_1@10.242.238.88:<0.31298.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.454,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.454,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{982, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.454,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.455,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.456,ns_1@10.242.238.88:<0.31298.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_730_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.462,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 982 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.462,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 982) [ns_server:debug,2014-08-19T16:50:04.463,ns_1@10.242.238.88:<0.419.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_481 [ns_server:debug,2014-08-19T16:50:04.463,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.464,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 991 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.464,ns_1@10.242.238.88:<0.816.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 991 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:04.465,ns_1@10.242.238.88:<0.419.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[481]}, {checkpoints,[{481,1}]}, {name,<<"rebalance_481">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[481]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"481"}]} [rebalance:debug,2014-08-19T16:50:04.466,ns_1@10.242.238.88:<0.419.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.817.1> [rebalance:info,2014-08-19T16:50:04.467,ns_1@10.242.238.88:<0.419.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.469,ns_1@10.242.238.88:<0.419.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.469,ns_1@10.242.238.88:<0.419.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.469,ns_1@10.242.238.88:<0.30723.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 481 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.471,ns_1@10.242.238.88:<0.30731.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:info,2014-08-19T16:50:04.474,ns_1@10.242.238.88:<0.30731.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_481_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.477,ns_1@10.242.238.88:<0.443.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_742 [ns_server:debug,2014-08-19T16:50:04.478,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.478,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:04.478,ns_1@10.242.238.88:<0.443.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[742]}, {checkpoints,[{742,1}]}, {name,<<"rebalance_742">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[742]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"742"}]} [ns_server:debug,2014-08-19T16:50:04.478,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.479,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.479,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{991, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.479,ns_1@10.242.238.88:<0.443.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.823.1> [rebalance:info,2014-08-19T16:50:04.480,ns_1@10.242.238.88:<0.443.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.482,ns_1@10.242.238.88:<0.443.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.482,ns_1@10.242.238.88:<0.443.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.483,ns_1@10.242.238.88:<0.30283.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 742 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.485,ns_1@10.242.238.88:<0.30291.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [views:debug,2014-08-19T16:50:04.485,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/757. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.485,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",757,active,0} [rebalance:info,2014-08-19T16:50:04.488,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 991 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:info,2014-08-19T16:50:04.488,ns_1@10.242.238.88:<0.30291.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_742_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.488,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 991) [ns_server:debug,2014-08-19T16:50:04.489,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.489,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 984 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.489,ns_1@10.242.238.88:<0.833.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 984 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.495,ns_1@10.242.238.88:<0.428.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_478 [rebalance:info,2014-08-19T16:50:04.497,ns_1@10.242.238.88:<0.428.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[478]}, {checkpoints,[{478,1}]}, {name,<<"rebalance_478">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[478]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"478"}]} [rebalance:debug,2014-08-19T16:50:04.498,ns_1@10.242.238.88:<0.428.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.834.1> [rebalance:info,2014-08-19T16:50:04.499,ns_1@10.242.238.88:<0.428.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.501,ns_1@10.242.238.88:<0.428.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.501,ns_1@10.242.238.88:<0.428.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.502,ns_1@10.242.238.88:<0.30940.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 478 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.503,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.503,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.503,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.503,ns_1@10.242.238.88:<0.30948.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.504,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.504,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{984, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.506,ns_1@10.242.238.88:<0.30948.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_478_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.509,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 984 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.510,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 984) [ns_server:debug,2014-08-19T16:50:04.511,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.511,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 989 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.511,ns_1@10.242.238.88:<0.847.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 989 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.514,ns_1@10.242.238.88:<0.427.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_480 [rebalance:info,2014-08-19T16:50:04.516,ns_1@10.242.238.88:<0.427.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[480]}, {checkpoints,[{480,1}]}, {name,<<"rebalance_480">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[480]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"480"}]} [rebalance:debug,2014-08-19T16:50:04.517,ns_1@10.242.238.88:<0.427.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.848.1> [rebalance:info,2014-08-19T16:50:04.517,ns_1@10.242.238.88:<0.427.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.519,ns_1@10.242.238.88:<0.427.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.519,ns_1@10.242.238.88:<0.427.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.520,ns_1@10.242.238.88:<0.30800.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 480 state change: {'ns_1@10.242.238.89',active,undefined, undefined} 
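Every move in this stretch produces the same recognisable run of messages: kill the old rebalance_N tap, start a takeover tap stream (no backfill in these cases), terminate it normally, flip the vbucket to active on its new owner, shut down the replica builder and its replication_building_N taps, then record "Moving vbucket N done" and push the buckets config. A sketch that reconstructs that order per vbucket from a saved copy of this log (the event labels are my own; the patterns use only message text that appears above, and the file path is an assumption):

import re
from collections import defaultdict

# Message fragments visible in this section; the labels on the left are mine.
PATTERNS = [
    ("kill_tap",     re.compile(r"killing tap named: rebalance_(\d+)")),
    ("tap_takeover", re.compile(r"Starting tap stream:\s*\[\{vbuckets,\[(\d+)\]")),
    ("state_change", re.compile(r"Doing vbucket (\d+) state change")),
    ("move_done",    re.compile(r"Moving vbucket (\d+) done")),
    ("move_noted",   re.compile(r"Noted vbucket move end \(vbucket (\d+)\)")),
]

def per_vbucket_events(text):
    """Group recognisable rebalance messages by vbucket id, in log order."""
    hits = []
    for name, pattern in PATTERNS:
        for m in pattern.finditer(text):
            hits.append((m.start(), int(m.group(1)), name))
    events = defaultdict(list)
    for _pos, vb, name in sorted(hits):
        events[vb].append(name)
    return events

if __name__ == "__main__":
    with open("ns_server.debug.log") as f:          # path is an assumption
        for vb, seq in sorted(per_vbucket_events(f.read()).items()):
            print(vb, "->", " / ".join(seq))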
[rebalance:debug,2014-08-19T16:50:04.522,ns_1@10.242.238.88:<0.30808.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.525,ns_1@10.242.238.88:<0.30808.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_480_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.525,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.525,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.526,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.526,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{989, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.526,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.530,ns_1@10.242.238.88:<0.420.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_476 [rebalance:info,2014-08-19T16:50:04.532,ns_1@10.242.238.88:<0.420.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[476]}, {checkpoints,[{476,1}]}, {name,<<"rebalance_476">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[476]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"476"}]} [rebalance:debug,2014-08-19T16:50:04.533,ns_1@10.242.238.88:<0.420.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.858.1> [rebalance:info,2014-08-19T16:50:04.533,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 989 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.534,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 989) [rebalance:info,2014-08-19T16:50:04.534,ns_1@10.242.238.88:<0.420.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.535,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.535,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 993 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.535,ns_1@10.242.238.88:<0.861.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 993 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:04.535,ns_1@10.242.238.88:<0.420.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.536,ns_1@10.242.238.88:<0.420.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.537,ns_1@10.242.238.88:<0.31152.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 476 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.538,ns_1@10.242.238.88:<0.31160.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.541,ns_1@10.242.238.88:<0.31160.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_476_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.550,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.551,ns_1@10.242.238.88:<0.451.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_986 [ns_server:debug,2014-08-19T16:50:04.551,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.551,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.551,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.551,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{993, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:04.556,ns_1@10.242.238.88:<0.451.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[986]}, {checkpoints,[{986,1}]}, {name,<<"rebalance_986">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[986]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"986"}]} [rebalance:debug,2014-08-19T16:50:04.557,ns_1@10.242.238.88:<0.451.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.886.1> [rebalance:info,2014-08-19T16:50:04.558,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 993 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:04.558,ns_1@10.242.238.88:<0.451.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.559,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 993) [ns_server:debug,2014-08-19T16:50:04.559,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.560,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 997 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.560,ns_1@10.242.238.88:<0.889.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 997 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:04.560,ns_1@10.242.238.88:<0.451.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.560,ns_1@10.242.238.88:<0.451.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.561,ns_1@10.242.238.88:<0.31187.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 986 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.563,ns_1@10.242.238.88:<0.31195.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.566,ns_1@10.242.238.88:<0.429.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_731 [ns_server:info,2014-08-19T16:50:04.567,ns_1@10.242.238.88:<0.31195.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_986_'ns_1@10.242.238.90'">>] 
[rebalance:info,2014-08-19T16:50:04.568,ns_1@10.242.238.88:<0.429.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[731]}, {checkpoints,[{731,1}]}, {name,<<"rebalance_731">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[731]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"731"}]} [rebalance:debug,2014-08-19T16:50:04.569,ns_1@10.242.238.88:<0.429.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.892.1> [rebalance:info,2014-08-19T16:50:04.570,ns_1@10.242.238.88:<0.429.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.571,ns_1@10.242.238.88:<0.429.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.572,ns_1@10.242.238.88:<0.429.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.572,ns_1@10.242.238.88:<0.31208.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 731 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.88:<0.31216.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.574,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.575,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{997, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.575,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.577,ns_1@10.242.238.88:<0.31216.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_731_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.582,ns_1@10.242.238.88:<0.434.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_737 [rebalance:info,2014-08-19T16:50:04.583,ns_1@10.242.238.88:<0.434.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[737]}, {checkpoints,[{737,1}]}, {name,<<"rebalance_737">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[737]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"737"}]} [rebalance:debug,2014-08-19T16:50:04.584,ns_1@10.242.238.88:<0.434.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.903.1> 
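Each "Starting tap stream" entry prints the stream parameters followed by a tuple of two host/port pairs and an option list. The vbucket 737 stream just above, transcribed into Python (reading the first pair as the source node and the second as the destination is my assumption; the option names are exactly as printed, and get_from_config is the literal placeholder shown in the log, not a credential):

# Takeover stream for vbucket 737, as printed in the entry above.
takeover_vb737 = {
    "source":      ("10.242.238.88", 11209),   # assumed: first host/port pair
    "destination": ("10.242.238.90", 11209),   # assumed: second host/port pair
    "options": {
        "username": "default",
        "password": "get_from_config",          # placeholder printed by the log
        "vbuckets": [737],
        "set_to_pending_state": True,
        "takeover": True,
        "suffix": "737",
    },
}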
[rebalance:info,2014-08-19T16:50:04.585,ns_1@10.242.238.88:<0.434.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:04.585,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 997 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.586,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 997) [rebalance:debug,2014-08-19T16:50:04.586,ns_1@10.242.238.88:<0.434.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.587,ns_1@10.242.238.88:<0.434.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:04.587,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.587,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 996 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.587,ns_1@10.242.238.88:<0.906.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 996 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:04.587,ns_1@10.242.238.88:<0.30702.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 737 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.598,ns_1@10.242.238.88:<0.446.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_985 [rebalance:debug,2014-08-19T16:50:04.600,ns_1@10.242.238.88:<0.30710.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.600,ns_1@10.242.238.88:<0.446.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[985]}, {checkpoints,[{985,1}]}, {name,<<"rebalance_985">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[985]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"985"}]} [ns_server:debug,2014-08-19T16:50:04.601,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.601,ns_1@10.242.238.88:<0.446.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.909.1> [rebalance:info,2014-08-19T16:50:04.602,ns_1@10.242.238.88:<0.446.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.602,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{996, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.602,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.602,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.604,ns_1@10.242.238.88:<0.446.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.604,ns_1@10.242.238.88:<0.446.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:04.605,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.605,ns_1@10.242.238.88:<0.31269.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 985 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:04.606,ns_1@10.242.238.88:<0.30710.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_737_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:50:04.608,ns_1@10.242.238.88:<0.31277.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.611,ns_1@10.242.238.88:<0.31277.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_985_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:04.611,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 996 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.612,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 996) [ns_server:debug,2014-08-19T16:50:04.613,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.613,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 994 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.613,ns_1@10.242.238.88:<0.922.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 994 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.617,ns_1@10.242.238.88:<0.441.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_738 [rebalance:info,2014-08-19T16:50:04.619,ns_1@10.242.238.88:<0.441.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[738]}, {checkpoints,[{738,1}]}, {name,<<"rebalance_738">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[738]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"738"}]} [rebalance:debug,2014-08-19T16:50:04.620,ns_1@10.242.238.88:<0.441.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.923.1> [rebalance:info,2014-08-19T16:50:04.621,ns_1@10.242.238.88:<0.441.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.623,ns_1@10.242.238.88:<0.441.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.623,ns_1@10.242.238.88:<0.441.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.624,ns_1@10.242.238.88:<0.30625.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 738 state change: {'ns_1@10.242.238.90',active,undefined, undefined} 
[ns_server:debug,2014-08-19T16:50:04.627,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.628,ns_1@10.242.238.88:<0.30633.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.628,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.628,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{994, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.629,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.629,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.630,ns_1@10.242.238.88:<0.424.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_733 [ns_server:debug,2014-08-19T16:50:04.631,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 755. Nacking mccouch update. [views:debug,2014-08-19T16:50:04.631,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/755. 
Updated state: active (0) [ns_server:info,2014-08-19T16:50:04.631,ns_1@10.242.238.88:<0.30633.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_738_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.631,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",755,active,0} [rebalance:info,2014-08-19T16:50:04.632,ns_1@10.242.238.88:<0.424.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[733]}, {checkpoints,[{733,1}]}, {name,<<"rebalance_733">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[733]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"733"}]} [rebalance:debug,2014-08-19T16:50:04.633,ns_1@10.242.238.88:<0.424.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.933.1> [ns_server:debug,2014-08-19T16:50:04.633,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,871,688,560,194,986,922,858,794,492,428,364,300,973,909,845, 781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764, 700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,793,738, 674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648, 584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558, 192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140, 996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021, 944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,442, 314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012, 935,807,752,624,130] 
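The "Usable vbuckets:" entry above ends with a long dump of vbucket ids. A quick sanity check against the bucket's num_vbuckets (1024 in the config entries in this section) is to count the ids and look for duplicates; a sketch, assuming the log has been saved to a file:

import re

def usable_vbucket_stats(path="ns_server.debug.log"):    # path is an assumption
    """Count the ids in the most recent 'Usable vbuckets:' dump."""
    text = open(path).read()
    _, _, tail = text.rpartition("Usable vbuckets:")
    ids = [int(x) for x in re.findall(r"\d+", tail.split("]", 1)[0])]
    return len(ids), len(set(ids)), min(ids), max(ids)

if __name__ == "__main__":
    total, distinct, lo, hi = usable_vbucket_stats()
    print(f"{total} ids listed ({distinct} distinct), range {lo}..{hi}, out of 1024")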
[rebalance:info,2014-08-19T16:50:04.634,ns_1@10.242.238.88:<0.424.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.636,ns_1@10.242.238.88:<0.424.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.636,ns_1@10.242.238.88:<0.424.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.637,ns_1@10.242.238.88:<0.30996.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 733 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.640,ns_1@10.242.238.88:<0.31004.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.640,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 994 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.640,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 994) [ns_server:debug,2014-08-19T16:50:04.641,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.641,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 990 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.642,ns_1@10.242.238.88:<0.936.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 990 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:50:04.644,ns_1@10.242.238.88:<0.31004.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_733_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.648,ns_1@10.242.238.88:<0.433.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_728 [rebalance:info,2014-08-19T16:50:04.652,ns_1@10.242.238.88:<0.433.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[728]}, {checkpoints,[{728,1}]}, {name,<<"rebalance_728">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[728]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"728"}]} [rebalance:debug,2014-08-19T16:50:04.653,ns_1@10.242.238.88:<0.433.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.940.1> [rebalance:info,2014-08-19T16:50:04.654,ns_1@10.242.238.88:<0.433.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.656,ns_1@10.242.238.88:<0.433.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.656,ns_1@10.242.238.88:<0.433.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.657,ns_1@10.242.238.88:<0.31445.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 728 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.657,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.658,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
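Alongside the moves themselves, the same few debug messages (capi_set_view_manager doing replicate_newnodes_docs, ns_config_rep pushing the buckets key, ns_config_log printing the config change) fire after almost every move. Counting entries per module:function makes that chatter easy to quantify; a sketch reusing the header layout from the parser sketch earlier (the log path is an assumption):

import re
from collections import Counter

# Capture module:function from each bracketed header.
WHERE = re.compile(r"\[[^,\]]+,[^,\]]+,[^:\]]+:[^:\]]+:([^:\]]+:[^:\]]+):\d+\]")

def chatter(path="ns_server.debug.log"):
    """Count log entries per module:function in the saved log."""
    return Counter(WHERE.findall(open(path).read()))

if __name__ == "__main__":
    for where, n in chatter().most_common(10):
        print(f"{n:6d}  {where}")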
[ns_server:debug,2014-08-19T16:50:04.658,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.658,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{990, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.659,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.659,ns_1@10.242.238.88:<0.31453.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.663,ns_1@10.242.238.88:<0.31453.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_728_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.664,ns_1@10.242.238.88:<0.431.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_484 [rebalance:info,2014-08-19T16:50:04.668,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 990 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.669,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 990) [rebalance:info,2014-08-19T16:50:04.669,ns_1@10.242.238.88:<0.431.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[484]}, {checkpoints,[{484,1}]}, {name,<<"rebalance_484">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[484]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"484"}]} [ns_server:debug,2014-08-19T16:50:04.669,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.669,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 486 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.670,ns_1@10.242.238.88:<0.952.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 486 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:04.670,ns_1@10.242.238.88:<0.431.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.953.1> [rebalance:info,2014-08-19T16:50:04.671,ns_1@10.242.238.88:<0.431.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.674,ns_1@10.242.238.88:<0.431.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.674,ns_1@10.242.238.88:<0.431.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.675,ns_1@10.242.238.88:<0.30481.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 484 state change: {'ns_1@10.242.238.89',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:50:04.676,ns_1@10.242.238.88:<0.30497.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.682,ns_1@10.242.238.88:<0.30497.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_484_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.683,ns_1@10.242.238.88:<0.421.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_727 [ns_server:debug,2014-08-19T16:50:04.684,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.684,ns_1@10.242.238.88:<0.421.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[727]}, {checkpoints,[{727,1}]}, {name,<<"rebalance_727">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[727]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"727"}]} [ns_server:debug,2014-08-19T16:50:04.685,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.685,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.685,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{486, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.685,ns_1@10.242.238.88:<0.421.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.959.1> [ns_server:debug,2014-08-19T16:50:04.685,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.686,ns_1@10.242.238.88:<0.421.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.687,ns_1@10.242.238.88:<0.421.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.688,ns_1@10.242.238.88:<0.421.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.689,ns_1@10.242.238.88:<0.31522.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 727 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [views:debug,2014-08-19T16:50:04.689,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/755. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.690,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",755,active,0} [rebalance:debug,2014-08-19T16:50:04.691,ns_1@10.242.238.88:<0.31530.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.692,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 486 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.693,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 486) [ns_server:debug,2014-08-19T16:50:04.694,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.694,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 736 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.694,ns_1@10.242.238.88:<0.967.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 736 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:04.695,ns_1@10.242.238.88:<0.31530.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_727_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.698,ns_1@10.242.238.88:<0.425.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_735 [rebalance:info,2014-08-19T16:50:04.699,ns_1@10.242.238.88:<0.425.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[735]}, {checkpoints,[{735,1}]}, {name,<<"rebalance_735">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[735]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"735"}]} [rebalance:debug,2014-08-19T16:50:04.700,ns_1@10.242.238.88:<0.425.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.970.1> [rebalance:info,2014-08-19T16:50:04.701,ns_1@10.242.238.88:<0.425.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.702,ns_1@10.242.238.88:<0.425.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.703,ns_1@10.242.238.88:<0.425.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.703,ns_1@10.242.238.88:<0.30842.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 735 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.705,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.705,ns_1@10.242.238.88:<0.30850.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.706,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.706,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.706,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.707,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{736, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:04.709,ns_1@10.242.238.88:<0.30850.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_735_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.713,ns_1@10.242.238.88:<0.416.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_472 [rebalance:info,2014-08-19T16:50:04.713,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 736 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.714,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 736) [ns_server:debug,2014-08-19T16:50:04.715,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.715,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 729 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.715,ns_1@10.242.238.88:<0.416.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[472]}, {checkpoints,[{472,1}]}, {name,<<"rebalance_472">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[472]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"472"}]} [rebalance:info,2014-08-19T16:50:04.715,ns_1@10.242.238.88:<0.983.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 729 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:04.716,ns_1@10.242.238.88:<0.416.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.984.1> [rebalance:info,2014-08-19T16:50:04.716,ns_1@10.242.238.88:<0.416.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.718,ns_1@10.242.238.88:<0.416.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.718,ns_1@10.242.238.88:<0.416.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.719,ns_1@10.242.238.88:<0.31480.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 472 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.723,ns_1@10.242.238.88:<0.31488.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:info,2014-08-19T16:50:04.726,ns_1@10.242.238.88:<0.31488.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_472_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.727,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.728,ns_1@10.242.238.88:<0.436.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_739 [ns_server:debug,2014-08-19T16:50:04.728,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.728,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.728,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.729,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{729, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:04.732,ns_1@10.242.238.88:<0.436.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[739]}, {checkpoints,[{739,1}]}, {name,<<"rebalance_739">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[739]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"739"}]} [rebalance:debug,2014-08-19T16:50:04.732,ns_1@10.242.238.88:<0.436.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.994.1> [rebalance:info,2014-08-19T16:50:04.733,ns_1@10.242.238.88:<0.436.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.735,ns_1@10.242.238.88:<0.436.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.735,ns_1@10.242.238.88:<0.436.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.736,ns_1@10.242.238.88:<0.30533.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 739 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.738,ns_1@10.242.238.88:<0.30541.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.739,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 729 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.740,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 729) [ns_server:debug,2014-08-19T16:50:04.740,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.740,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 730 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.741,ns_1@10.242.238.88:<0.998.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 730 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:04.742,ns_1@10.242.238.88:<0.30541.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_739_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.744,ns_1@10.242.238.88:<0.426.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_482 [rebalance:info,2014-08-19T16:50:04.745,ns_1@10.242.238.88:<0.426.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[482]}, {checkpoints,[{482,1}]}, {name,<<"rebalance_482">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[482]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"482"}]} [rebalance:debug,2014-08-19T16:50:04.746,ns_1@10.242.238.88:<0.426.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1001.1> [rebalance:info,2014-08-19T16:50:04.747,ns_1@10.242.238.88:<0.426.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.749,ns_1@10.242.238.88:<0.426.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.749,ns_1@10.242.238.88:<0.426.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.750,ns_1@10.242.238.88:<0.30646.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 482 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.751,ns_1@10.242.238.88:<0.30654.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.761,ns_1@10.242.238.88:<0.440.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_734 [ns_server:info,2014-08-19T16:50:04.761,ns_1@10.242.238.88:<0.30654.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_482_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.762,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.763,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.763,ns_1@10.242.238.88:<0.440.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[734]}, {checkpoints,[{734,1}]}, {name,<<"rebalance_734">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[734]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"734"}]} 
[ns_server:debug,2014-08-19T16:50:04.763,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{730, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.763,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.763,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:04.764,ns_1@10.242.238.88:<0.440.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1020.1> [rebalance:info,2014-08-19T16:50:04.764,ns_1@10.242.238.88:<0.440.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.766,ns_1@10.242.238.88:<0.440.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.766,ns_1@10.242.238.88:<0.440.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.768,ns_1@10.242.238.88:<0.30919.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 734 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.770,ns_1@10.242.238.88:<0.30927.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.770,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 730 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.771,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 730) [ns_server:debug,2014-08-19T16:50:04.771,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.772,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 481 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.772,ns_1@10.242.238.88:<0.1028.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 481 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:50:04.773,ns_1@10.242.238.88:<0.30927.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_734_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.779,ns_1@10.242.238.88:<0.438.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_732 [rebalance:info,2014-08-19T16:50:04.781,ns_1@10.242.238.88:<0.438.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[732]}, {checkpoints,[{732,1}]}, {name,<<"rebalance_732">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[732]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"732"}]} [rebalance:debug,2014-08-19T16:50:04.782,ns_1@10.242.238.88:<0.438.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1032.1> [ns_server:debug,2014-08-19T16:50:04.782,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.783,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.783,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.783,ns_1@10.242.238.88:<0.438.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.783,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.783,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{481, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.785,ns_1@10.242.238.88:<0.438.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.785,ns_1@10.242.238.88:<0.438.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.786,ns_1@10.242.238.88:<0.31087.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 732 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.787,ns_1@10.242.238.88:<0.31095.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:04.789,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 481 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.790,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 481) [ns_server:debug,2014-08-19T16:50:04.791,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.791,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 742 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [ns_server:info,2014-08-19T16:50:04.791,ns_1@10.242.238.88:<0.31095.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_732_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.791,ns_1@10.242.238.88:<0.1044.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 742 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:04.795,ns_1@10.242.238.88:<0.418.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_483 [rebalance:info,2014-08-19T16:50:04.798,ns_1@10.242.238.88:<0.418.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[483]}, {checkpoints,[{483,1}]}, {name,<<"rebalance_483">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[483]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"483"}]} [rebalance:debug,2014-08-19T16:50:04.799,ns_1@10.242.238.88:<0.418.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1045.1> [rebalance:info,2014-08-19T16:50:04.799,ns_1@10.242.238.88:<0.418.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.801,ns_1@10.242.238.88:<0.418.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.801,ns_1@10.242.238.88:<0.418.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.802,ns_1@10.242.238.88:<0.30569.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 483 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:04.802,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.803,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.803,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.803,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.803,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{742, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:04.805,ns_1@10.242.238.88:<0.30577.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.808,ns_1@10.242.238.88:<0.30577.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_483_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.809,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 742 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.810,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 742) [ns_server:debug,2014-08-19T16:50:04.811,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.811,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 478 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.811,ns_1@10.242.238.88:<0.1057.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 478 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:04.812,ns_1@10.242.238.88:<0.435.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_726 [rebalance:info,2014-08-19T16:50:04.815,ns_1@10.242.238.88:<0.435.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[726]}, {checkpoints,[{726,1}]}, {name,<<"rebalance_726">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[726]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"726"}]} [ns_server:debug,2014-08-19T16:50:04.815,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 753. Nacking mccouch update. [views:debug,2014-08-19T16:50:04.815,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/753. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.815,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",753,active,0} [rebalance:debug,2014-08-19T16:50:04.815,ns_1@10.242.238.88:<0.435.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1058.1> [rebalance:info,2014-08-19T16:50:04.816,ns_1@10.242.238.88:<0.435.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:04.817,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,871,688,560,194,986,922,858,794,492,428,364,300,973,909,845, 781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764, 700,636,572,206,142,1011,998,934,870,806,504,440,376,312,985,921,857,793,738, 674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648, 584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558, 192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140, 996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021, 944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262, 1012,935,807,752,624,130] [rebalance:debug,2014-08-19T16:50:04.818,ns_1@10.242.238.88:<0.435.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.818,ns_1@10.242.238.88:<0.435.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.819,ns_1@10.242.238.88:<0.31613.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 726 state change: {'ns_1@10.242.238.90',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:50:04.821,ns_1@10.242.238.88:<0.31621.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.821,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.822,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.823,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.823,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{478, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.823,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:04.825,ns_1@10.242.238.88:<0.31621.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_726_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:04.829,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 478 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.830,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 478) [ns_server:debug,2014-08-19T16:50:04.831,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:04.831,ns_1@10.242.238.88:<0.422.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_485 [rebalance:info,2014-08-19T16:50:04.831,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 480 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.831,ns_1@10.242.238.88:<0.1070.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 480 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:50:04.832,ns_1@10.242.238.88:<0.422.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[485]}, {checkpoints,[{485,1}]}, {name,<<"rebalance_485">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[485]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"485"}]} [rebalance:debug,2014-08-19T16:50:04.833,ns_1@10.242.238.88:<0.422.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1071.1> [rebalance:info,2014-08-19T16:50:04.834,ns_1@10.242.238.88:<0.422.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.835,ns_1@10.242.238.88:<0.422.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.835,ns_1@10.242.238.88:<0.422.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.836,ns_1@10.242.238.88:<0.30400.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 485 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:04.838,ns_1@10.242.238.88:<0.30408.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:04.841,ns_1@10.242.238.88:<0.30408.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_485_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.841,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.842,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{480, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.843,ns_1@10.242.238.88:<0.417.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_474 
[ns_server:debug,2014-08-19T16:50:04.844,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.844,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.845,ns_1@10.242.238.88:<0.417.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[474]}, {checkpoints,[{474,1}]}, {name,<<"rebalance_474">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[474]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"474"}]} [rebalance:debug,2014-08-19T16:50:04.845,ns_1@10.242.238.88:<0.417.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.1077.1> [rebalance:info,2014-08-19T16:50:04.847,ns_1@10.242.238.88:<0.417.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:04.848,ns_1@10.242.238.88:<0.417.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:04.849,ns_1@10.242.238.88:<0.417.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:04.849,ns_1@10.242.238.88:<0.31311.0>:janitor_agent:set_vbucket_state:518]Doing vbucket 474 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:04.850,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 480 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.851,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 480) [rebalance:debug,2014-08-19T16:50:04.851,ns_1@10.242.238.88:<0.31319.0>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:04.851,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.851,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 476 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:04.852,ns_1@10.242.238.88:<0.1085.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 476 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:50:04.854,ns_1@10.242.238.88:<0.31319.0>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_474_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:04.862,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.862,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.863,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.863,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.863,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{476, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:04.873,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 476 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.874,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 476) [views:debug,2014-08-19T16:50:04.874,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/753. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.874,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",753,active,0} [ns_server:debug,2014-08-19T16:50:04.875,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.875,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 986 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.875,ns_1@10.242.238.88:<0.1097.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 986 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.896,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.897,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{986, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:04.904,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 986 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.904,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 986) [ns_server:debug,2014-08-19T16:50:04.905,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.906,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 731 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.906,ns_1@10.242.238.88:<0.1108.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 731 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:04.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.918,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{731, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.918,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:04.944,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 731 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.945,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 731) [ns_server:debug,2014-08-19T16:50:04.946,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.946,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 737 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.946,ns_1@10.242.238.88:<0.1119.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 737 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:04.960,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.960,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.960,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:04.960,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.960,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{737, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:04.966,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 737 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 737) [ns_server:debug,2014-08-19T16:50:04.968,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.968,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 985 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:04.968,ns_1@10.242.238.88:<0.1144.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 985 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:04.982,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.982,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 751. Nacking mccouch update. [ns_server:debug,2014-08-19T16:50:04.982,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[views:debug,2014-08-19T16:50:04.982,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/751. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:04.983,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.983,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",751,active,0} [ns_server:debug,2014-08-19T16:50:04.983,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{985, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:04.983,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:04.985,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,438,310,983,855,672,544,178,906,778,412,284,957,829, 646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853,670, 542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746,618, 252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566,200, 1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514,148, 876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590,224, 952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172,900, 772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120,976, 848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924, 796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,871,688,560,194,986,922,858,794,492,428,364,300,973,909,845, 781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764, 700,636,572,206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793, 738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712, 648,584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686, 558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634, 140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216, 1021,944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530, 164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240, 112,968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554, 188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136, 992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 
888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184, 912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988, 860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808, 753,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390, 262,1012,935,807,752,624,130] [rebalance:info,2014-08-19T16:50:04.988,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 985 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:04.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 985) [ns_server:debug,2014-08-19T16:50:04.990,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:04.990,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 738 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:04.990,ns_1@10.242.238.88:<0.1155.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 738 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:05.001,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.001,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.002,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{738, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.002,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.002,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.008,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 738 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.009,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 738) [ns_server:debug,2014-08-19T16:50:05.010,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.010,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 733 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.010,ns_1@10.242.238.88:<0.1166.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 733 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:50:05.016,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/751. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.016,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",751,active,0} [ns_server:debug,2014-08-19T16:50:05.022,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.023,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.023,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.023,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.023,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{733, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:05.031,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 733 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.032,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 733) [ns_server:debug,2014-08-19T16:50:05.033,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.033,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 728 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.033,ns_1@10.242.238.88:<0.1176.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 728 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:05.045,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.045,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.045,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.046,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{728, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.046,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.051,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 728 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.051,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 728) [ns_server:debug,2014-08-19T16:50:05.052,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.052,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 484 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:05.052,ns_1@10.242.238.88:<0.1187.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 484 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:05.061,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.062,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.063,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:05.063,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{484, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.063,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.073,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 484 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.074,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:05.074,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 484) [rebalance:info,2014-08-19T16:50:05.074,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 727 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.074,ns_1@10.242.238.88:<0.1212.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 727 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:05.087,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.088,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.088,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.088,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.088,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{727, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:05.095,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 727 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.095,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 727) [ns_server:debug,2014-08-19T16:50:05.096,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.096,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 735 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.097,ns_1@10.242.238.88:<0.1222.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 735 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:05.114,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.115,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.115,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.115,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{735, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.115,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.116,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 749. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.116,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/749. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.117,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",749,active,0} [ns_server:debug,2014-08-19T16:50:05.119,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,436,308,981,853, 670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801,746, 618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694,566, 200,1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642,514, 148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718,590, 224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538,172, 900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248,120, 976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001, 924,796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144,872, 506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948, 820,765,454,326,999,871,688,560,194,922,794,428,300,973,909,845,781,726,662, 598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764,700,636,572, 206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520, 218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920, 792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868, 502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816, 761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164,892,398, 270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840, 474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788, 422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498, 370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757, 446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012,935, 807,752,624,130,986,858,492,364] [rebalance:info,2014-08-19T16:50:05.121,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 735 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.122,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 735) [ns_server:debug,2014-08-19T16:50:05.123,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.123,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 472 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:05.123,ns_1@10.242.238.88:<0.1233.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 472 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:05.134,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.135,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.135,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.135,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.135,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{472, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:05.142,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 472 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.142,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 472) [ns_server:debug,2014-08-19T16:50:05.144,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.144,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 739 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.144,ns_1@10.242.238.88:<0.1244.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 739 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:05.156,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.157,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:05.157,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.158,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.158,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{739, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:05.166,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 739 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.167,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 739) [ns_server:debug,2014-08-19T16:50:05.168,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.168,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 482 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:05.168,ns_1@10.242.238.88:<0.1260.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 482 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:05.178,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.178,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.179,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.179,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.179,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{482, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:05.184,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 482 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.185,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 482) [ns_server:debug,2014-08-19T16:50:05.186,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.186,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 734 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.186,ns_1@10.242.238.88:<0.1270.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 734 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:50:05.200,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/749. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.201,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",749,active,0} [ns_server:debug,2014-08-19T16:50:05.208,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.209,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{734, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.209,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.210,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.211,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.220,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 734 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.220,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 734) [ns_server:debug,2014-08-19T16:50:05.221,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.221,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 732 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.221,ns_1@10.242.238.88:<0.1281.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 732 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:05.233,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.234,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{732, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.235,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.235,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.235,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.241,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 732 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.241,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 732) [ns_server:debug,2014-08-19T16:50:05.242,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.242,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 483 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:05.242,ns_1@10.242.238.88:<0.1306.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 483 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:05.252,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.253,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:05.253,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.253,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{483, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.253,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.259,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 483 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.259,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 483) [ns_server:debug,2014-08-19T16:50:05.260,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.260,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 726 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:05.260,ns_1@10.242.238.88:<0.1316.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 726 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:05.272,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.273,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.273,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.273,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{726, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.273,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.284,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 726 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.284,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 726) [ns_server:debug,2014-08-19T16:50:05.285,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.285,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 485 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:05.285,ns_1@10.242.238.88:<0.1327.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 485 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:05.295,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.295,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:05.296,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.296,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{485, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.296,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:05.305,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 485 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.306,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 485) [ns_server:debug,2014-08-19T16:50:05.307,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:05.307,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 474 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:05.307,ns_1@10.242.238.88:<0.1338.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 474 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:05.317,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.318,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.318,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:05.318,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{474, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:05.318,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:05.319,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 747. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.319,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/747. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.319,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",747,active,0} [ns_server:debug,2014-08-19T16:50:05.320,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,434,306,979,851,668,540,174,902,774,408,280,953,825,642, 514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773,718, 590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666,538, 172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614,248, 120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562,196, 1001,924,796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638,144, 872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220, 948,820,765,454,326,999,871,688,560,194,922,794,428,300,973,909,845,781,726, 662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764,700,636, 572,206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674, 610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584, 520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192, 920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996, 868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944, 816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164,892, 398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968, 840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916, 788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864, 498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812, 757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394, 
266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012, 935,807,752,624,130,986,858,492,364] [rebalance:info,2014-08-19T16:50:05.323,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 474 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:05.327,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.88'}] [ns_server:debug,2014-08-19T16:50:05.327,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 474) [ns_server:debug,2014-08-19T16:50:05.329,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:50:05.330,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:50:05.330,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:50:05.334,ns_1@10.242.238.88:<0.1348.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:50:05.335,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:50:05.335,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.88'} [ns_server:debug,2014-08-19T16:50:05.335,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:50:05.340,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:05.340,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1350.1>) [ns_server:debug,2014-08-19T16:50:05.340,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 979) [ns_server:debug,2014-08-19T16:50:05.340,ns_1@10.242.238.88:<0.1351.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:05.341,ns_1@10.242.238.88:<0.1350.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 979 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.341,ns_1@10.242.238.88:<0.1356.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 979 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.341,ns_1@10.242.238.88:<0.1357.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 979 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.345,ns_1@10.242.238.88:<0.1358.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 979 into 'ns_1@10.242.238.90' is <18125.21466.0> [ns_server:debug,2014-08-19T16:50:05.346,ns_1@10.242.238.88:<0.1358.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 979 into 'ns_1@10.242.238.91' is <18126.22643.0> [rebalance:debug,2014-08-19T16:50:05.346,ns_1@10.242.238.88:<0.1350.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 979 is <0.1358.1> [views:debug,2014-08-19T16:50:05.352,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/747. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.353,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",747,active,0} [ns_server:debug,2014-08-19T16:50:05.386,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,377051}, tap_estimate, {replica_building,"default",979,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21466.0>, <<"replication_building_979_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:05.396,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,387013}, tap_estimate, {replica_building,"default",979,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22643.0>, <<"replication_building_979_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.396,ns_1@10.242.238.88:<0.1359.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22643.0>}, {'ns_1@10.242.238.90',<18125.21466.0>}]) [rebalance:info,2014-08-19T16:50:05.396,ns_1@10.242.238.88:<0.1350.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:05.397,ns_1@10.242.238.88:<0.1350.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 979 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.397,ns_1@10.242.238.88:<0.1350.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.398,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:05.402,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:05.402,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1371.1>) [ns_server:debug,2014-08-19T16:50:05.402,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 725) [ns_server:debug,2014-08-19T16:50:05.403,ns_1@10.242.238.88:<0.1372.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.403,ns_1@10.242.238.88:<0.1372.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:05.403,ns_1@10.242.238.88:<0.1371.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 725 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.403,ns_1@10.242.238.88:<0.1377.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 725 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.403,ns_1@10.242.238.88:<0.1378.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 725 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.407,ns_1@10.242.238.88:<0.1379.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 725 into 'ns_1@10.242.238.91' is <18126.22649.0> [ns_server:debug,2014-08-19T16:50:05.408,ns_1@10.242.238.88:<0.1379.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 725 into 'ns_1@10.242.238.90' is <18125.21485.0> [rebalance:debug,2014-08-19T16:50:05.408,ns_1@10.242.238.88:<0.1371.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 725 is <0.1379.1> [ns_server:debug,2014-08-19T16:50:05.442,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,433360}, tap_estimate, {replica_building,"default",725,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22649.0>, <<"replication_building_725_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,448709}, tap_estimate, {replica_building,"default",725,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21485.0>, <<"replication_building_725_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:05.458,ns_1@10.242.238.88:<0.1380.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21485.0>}, {'ns_1@10.242.238.91',<18126.22649.0>}]) [rebalance:info,2014-08-19T16:50:05.458,ns_1@10.242.238.88:<0.1371.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:05.458,ns_1@10.242.238.88:<0.1371.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 725 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.460,ns_1@10.242.238.88:<0.1371.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:05.460,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:05.464,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:05.464,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1407.1>) [ns_server:debug,2014-08-19T16:50:05.464,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 469) [ns_server:debug,2014-08-19T16:50:05.465,ns_1@10.242.238.88:<0.1408.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.465,ns_1@10.242.238.88:<0.1408.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:05.465,ns_1@10.242.238.88:<0.1407.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 469 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.465,ns_1@10.242.238.88:<0.1413.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 469 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.465,ns_1@10.242.238.88:<0.1414.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 469 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.469,ns_1@10.242.238.88:<0.1415.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 469 into 'ns_1@10.242.238.91' is <18126.22668.0> [ns_server:debug,2014-08-19T16:50:05.471,ns_1@10.242.238.88:<0.1415.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 469 into 'ns_1@10.242.238.89' is <18124.26380.0> [rebalance:debug,2014-08-19T16:50:05.471,ns_1@10.242.238.88:<0.1407.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 469 is <0.1415.1> [ns_server:debug,2014-08-19T16:50:05.505,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,496481}, tap_estimate, {replica_building,"default",469,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22668.0>, <<"replication_building_469_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.512,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 745. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.512,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/745. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.512,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",745,active,0} [ns_server:debug,2014-08-19T16:50:05.514,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,432,304,977,849,666, 538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742,614, 248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690,562, 196,1001,924,796,430,302,975,847,664,536,170,898,770,404,276,949,821,766,638, 144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586, 220,948,820,765,454,326,999,871,688,560,194,922,794,428,300,973,909,845,781, 726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764,700, 636,572,206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738, 674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648, 584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558, 192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140, 996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021, 944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262, 1012,935,807,752,624,130,986,858,492,364] [ns_server:debug,2014-08-19T16:50:05.527,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,518337}, tap_estimate, {replica_building,"default",469,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26380.0>, <<"replication_building_469_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:05.527,ns_1@10.242.238.88:<0.1416.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26380.0>}, {'ns_1@10.242.238.91',<18126.22668.0>}]) [rebalance:info,2014-08-19T16:50:05.527,ns_1@10.242.238.88:<0.1407.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 
[rebalance:info,2014-08-19T16:50:05.528,ns_1@10.242.238.88:<0.1407.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 469 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.528,ns_1@10.242.238.88:<0.1407.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.529,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:05.533,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:05.533,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1428.1>) [ns_server:debug,2014-08-19T16:50:05.533,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 978) [ns_server:debug,2014-08-19T16:50:05.534,ns_1@10.242.238.88:<0.1429.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.534,ns_1@10.242.238.88:<0.1429.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:05.534,ns_1@10.242.238.88:<0.1428.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 978 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.534,ns_1@10.242.238.88:<0.1434.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 978 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.534,ns_1@10.242.238.88:<0.1435.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 978 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.538,ns_1@10.242.238.88:<0.1436.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 978 into 'ns_1@10.242.238.90' is <18125.21511.0> [ns_server:debug,2014-08-19T16:50:05.541,ns_1@10.242.238.88:<0.1436.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 978 into 'ns_1@10.242.238.91' is <18126.22673.0> [rebalance:debug,2014-08-19T16:50:05.541,ns_1@10.242.238.88:<0.1428.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 978 is <0.1436.1> [views:debug,2014-08-19T16:50:05.571,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/745. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.571,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",745,active,0} [ns_server:debug,2014-08-19T16:50:05.574,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,565529}, tap_estimate, {replica_building,"default",978,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21511.0>, <<"replication_building_978_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:05.590,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,581586}, tap_estimate, {replica_building,"default",978,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22673.0>, <<"replication_building_978_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.591,ns_1@10.242.238.88:<0.1437.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22673.0>}, {'ns_1@10.242.238.90',<18125.21511.0>}]) [rebalance:info,2014-08-19T16:50:05.591,ns_1@10.242.238.88:<0.1428.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:05.591,ns_1@10.242.238.88:<0.1428.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 978 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.592,ns_1@10.242.238.88:<0.1428.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.593,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:05.597,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:05.597,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1449.1>) [ns_server:debug,2014-08-19T16:50:05.597,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 724) [ns_server:debug,2014-08-19T16:50:05.597,ns_1@10.242.238.88:<0.1450.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.597,ns_1@10.242.238.88:<0.1450.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:05.598,ns_1@10.242.238.88:<0.1449.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 724 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.598,ns_1@10.242.238.88:<0.1455.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 724 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.598,ns_1@10.242.238.88:<0.1456.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 724 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.601,ns_1@10.242.238.88:<0.1457.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 724 into 'ns_1@10.242.238.91' is <18126.22680.0> [ns_server:debug,2014-08-19T16:50:05.604,ns_1@10.242.238.88:<0.1457.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 724 into 'ns_1@10.242.238.90' is <18125.21516.0> [rebalance:debug,2014-08-19T16:50:05.604,ns_1@10.242.238.88:<0.1449.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 724 is <0.1457.1> [ns_server:debug,2014-08-19T16:50:05.636,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,627890}, tap_estimate, {replica_building,"default",724,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22680.0>, <<"replication_building_724_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.652,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,643842}, tap_estimate, {replica_building,"default",724,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21516.0>, <<"replication_building_724_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:05.653,ns_1@10.242.238.88:<0.1458.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21516.0>}, {'ns_1@10.242.238.91',<18126.22680.0>}]) [rebalance:info,2014-08-19T16:50:05.653,ns_1@10.242.238.88:<0.1449.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:05.654,ns_1@10.242.238.88:<0.1449.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 724 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.654,ns_1@10.242.238.88:<0.1449.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.655,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:05.659,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:05.659,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1484.1>) [ns_server:debug,2014-08-19T16:50:05.659,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 468) [ns_server:debug,2014-08-19T16:50:05.659,ns_1@10.242.238.88:<0.1485.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.660,ns_1@10.242.238.88:<0.1485.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:05.660,ns_1@10.242.238.88:<0.1484.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 468 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.660,ns_1@10.242.238.88:<0.1490.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 468 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.660,ns_1@10.242.238.88:<0.1491.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 468 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.664,ns_1@10.242.238.88:<0.1492.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 468 into 'ns_1@10.242.238.91' is <18126.22685.0> [ns_server:debug,2014-08-19T16:50:05.667,ns_1@10.242.238.88:<0.1492.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 468 into 'ns_1@10.242.238.89' is <18124.26400.0> [rebalance:debug,2014-08-19T16:50:05.667,ns_1@10.242.238.88:<0.1484.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 468 is <0.1492.1> [ns_server:debug,2014-08-19T16:50:05.707,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,691701}, tap_estimate, {replica_building,"default",468,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22685.0>, <<"replication_building_468_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.715,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,706424}, tap_estimate, {replica_building,"default",468,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26400.0>, <<"replication_building_468_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:05.715,ns_1@10.242.238.88:<0.1493.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26400.0>}, {'ns_1@10.242.238.91',<18126.22685.0>}]) [rebalance:info,2014-08-19T16:50:05.716,ns_1@10.242.238.88:<0.1484.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:05.716,ns_1@10.242.238.88:<0.1484.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 468 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.717,ns_1@10.242.238.88:<0.1484.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.717,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:05.721,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:05.721,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1505.1>) [ns_server:debug,2014-08-19T16:50:05.722,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 977) [ns_server:debug,2014-08-19T16:50:05.722,ns_1@10.242.238.88:<0.1506.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.722,ns_1@10.242.238.88:<0.1506.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:05.722,ns_1@10.242.238.88:<0.1505.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 977 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.722,ns_1@10.242.238.88:<0.1511.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 977 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.723,ns_1@10.242.238.88:<0.1512.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 977 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.726,ns_1@10.242.238.88:<0.1513.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 977 into 'ns_1@10.242.238.90' is <18125.21536.0> [ns_server:debug,2014-08-19T16:50:05.728,ns_1@10.242.238.88:<0.1513.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 977 into 'ns_1@10.242.238.91' is <18126.22704.0> [rebalance:debug,2014-08-19T16:50:05.728,ns_1@10.242.238.88:<0.1505.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 977 is <0.1513.1> [ns_server:debug,2014-08-19T16:50:05.738,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 743. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.738,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/743. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.738,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",743,active,0} [ns_server:debug,2014-08-19T16:50:05.740,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,430,302,975,847,664,536,170,898,770,404,276,949,821,766, 638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714, 586,220,948,820,765,454,326,999,871,688,560,194,922,794,428,300,973,909,845, 781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819,764, 700,636,572,206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793, 738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712, 648,584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686, 558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634, 140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216, 1021,944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530, 164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240, 112,968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554, 188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136, 992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184, 912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988, 860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808, 753,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390, 262,1012,935,807,752,624,130,986,858,492,364] [ns_server:debug,2014-08-19T16:50:05.761,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,752117}, tap_estimate, {replica_building,"default",977,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21536.0>, <<"replication_building_977_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:05.779,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,770233}, tap_estimate, {replica_building,"default",977,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22704.0>, <<"replication_building_977_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:05.779,ns_1@10.242.238.88:<0.1514.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22704.0>}, {'ns_1@10.242.238.90',<18125.21536.0>}]) [rebalance:info,2014-08-19T16:50:05.779,ns_1@10.242.238.88:<0.1505.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:05.780,ns_1@10.242.238.88:<0.1505.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 977 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.780,ns_1@10.242.238.88:<0.1505.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.781,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:05.785,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:05.785,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1526.1>) [ns_server:debug,2014-08-19T16:50:05.785,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 723) [ns_server:debug,2014-08-19T16:50:05.786,ns_1@10.242.238.88:<0.1527.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.786,ns_1@10.242.238.88:<0.1527.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:05.786,ns_1@10.242.238.88:<0.1526.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 723 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.786,ns_1@10.242.238.88:<0.1532.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 723 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.786,ns_1@10.242.238.88:<0.1533.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 723 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.790,ns_1@10.242.238.88:<0.1534.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 723 into 'ns_1@10.242.238.91' is <18126.22710.0> [ns_server:debug,2014-08-19T16:50:05.791,ns_1@10.242.238.88:<0.1534.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 723 into 'ns_1@10.242.238.90' is <18125.21541.0> [rebalance:debug,2014-08-19T16:50:05.792,ns_1@10.242.238.88:<0.1526.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 723 is <0.1534.1> [views:debug,2014-08-19T16:50:05.805,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/743. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.806,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",743,active,0} [ns_server:debug,2014-08-19T16:50:05.824,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,815509}, tap_estimate, {replica_building,"default",723,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22710.0>, <<"replication_building_723_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.842,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,833461}, tap_estimate, {replica_building,"default",723,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21541.0>, <<"replication_building_723_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:05.842,ns_1@10.242.238.88:<0.1535.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21541.0>}, {'ns_1@10.242.238.91',<18126.22710.0>}]) [rebalance:info,2014-08-19T16:50:05.842,ns_1@10.242.238.88:<0.1526.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:05.843,ns_1@10.242.238.88:<0.1526.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 723 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.843,ns_1@10.242.238.88:<0.1526.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.844,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:05.848,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:05.848,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1547.1>) [ns_server:debug,2014-08-19T16:50:05.848,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 467) [ns_server:debug,2014-08-19T16:50:05.849,ns_1@10.242.238.88:<0.1548.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.849,ns_1@10.242.238.88:<0.1548.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:05.849,ns_1@10.242.238.88:<0.1547.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 467 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.849,ns_1@10.242.238.88:<0.1553.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 467 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.849,ns_1@10.242.238.88:<0.1554.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 467 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.853,ns_1@10.242.238.88:<0.1555.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 467 into 'ns_1@10.242.238.91' is <18126.22715.0> [ns_server:debug,2014-08-19T16:50:05.855,ns_1@10.242.238.88:<0.1555.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 467 into 'ns_1@10.242.238.89' is <18124.26426.0> [rebalance:debug,2014-08-19T16:50:05.856,ns_1@10.242.238.88:<0.1547.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 467 is <0.1555.1> [ns_server:debug,2014-08-19T16:50:05.889,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,880258}, tap_estimate, {replica_building,"default",467,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22715.0>, <<"replication_building_467_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.905,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,896425}, tap_estimate, {replica_building,"default",467,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26426.0>, <<"replication_building_467_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:05.906,ns_1@10.242.238.88:<0.1556.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26426.0>}, {'ns_1@10.242.238.91',<18126.22715.0>}]) [rebalance:info,2014-08-19T16:50:05.906,ns_1@10.242.238.88:<0.1547.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:05.906,ns_1@10.242.238.88:<0.1547.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 467 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.907,ns_1@10.242.238.88:<0.1547.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.907,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:05.912,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:05.912,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1582.1>) [ns_server:debug,2014-08-19T16:50:05.912,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 976) [ns_server:debug,2014-08-19T16:50:05.912,ns_1@10.242.238.88:<0.1583.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.912,ns_1@10.242.238.88:<0.1583.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:05.913,ns_1@10.242.238.88:<0.1582.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 976 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.913,ns_1@10.242.238.88:<0.1588.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 976 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.913,ns_1@10.242.238.88:<0.1589.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 976 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.924,ns_1@10.242.238.88:<0.1590.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 976 into 'ns_1@10.242.238.90' is <18125.21561.0> [ns_server:debug,2014-08-19T16:50:05.927,ns_1@10.242.238.88:<0.1590.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 976 into 'ns_1@10.242.238.91' is <18126.22735.0> [rebalance:debug,2014-08-19T16:50:05.927,ns_1@10.242.238.88:<0.1582.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 976 is <0.1590.1> [ns_server:debug,2014-08-19T16:50:05.955,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 741. Nacking mccouch update. [views:debug,2014-08-19T16:50:05.955,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/741. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:05.956,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",741,active,0} [ns_server:debug,2014-08-19T16:50:05.957,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,428,300,973,909, 845,781,726,662,598,534,232,168,960,896,832,768,466,402,338,274,947,883,819, 764,700,636,572,206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857, 793,738,674,610,546,244,180,116,972,908,844,780,478,414,350,286,959,895,831, 712,648,584,520,218,154,1023,946,882,818,763,452,388,324,260,1010,997,869, 686,558,192,920,792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762, 634,140,996,868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582, 216,1021,944,816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658, 530,164,892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606, 240,112,968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682, 554,188,916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630, 136,992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212, 1017,940,812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526, 160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236, 108,964,836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550, 184,912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132, 988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936, 808,753,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884, 390,262,1012,935,807,752,624,130,986,858,492,364] [ns_server:debug,2014-08-19T16:50:05.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,951726}, tap_estimate, {replica_building,"default",976,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21561.0>, <<"replication_building_976_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:05.980,ns_1@10.242.238.88:<0.1591.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22735.0>}, {'ns_1@10.242.238.90',<18125.21561.0>}]) [rebalance:info,2014-08-19T16:50:05.980,ns_1@10.242.238.88:<0.1582.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 
[rebalance:info,2014-08-19T16:50:05.980,ns_1@10.242.238.88:<0.1582.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 976 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:05.981,ns_1@10.242.238.88:<0.1582.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:05.982,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:05.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452605,970434}, tap_estimate, {replica_building,"default",976,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22735.0>, <<"replication_building_976_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:05.987,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:05.987,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1603.1>) [ns_server:debug,2014-08-19T16:50:05.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 722) [ns_server:debug,2014-08-19T16:50:05.988,ns_1@10.242.238.88:<0.1604.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:05.988,ns_1@10.242.238.88:<0.1604.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:05.988,ns_1@10.242.238.88:<0.1603.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 722 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:05.988,ns_1@10.242.238.88:<0.1609.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 722 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:05.988,ns_1@10.242.238.88:<0.1610.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 722 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:05.993,ns_1@10.242.238.88:<0.1611.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 722 into 'ns_1@10.242.238.91' is <18126.22741.0> [ns_server:debug,2014-08-19T16:50:05.995,ns_1@10.242.238.88:<0.1611.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 722 into 'ns_1@10.242.238.90' is <18125.21566.0> [rebalance:debug,2014-08-19T16:50:05.995,ns_1@10.242.238.88:<0.1603.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 722 is <0.1611.1> [views:debug,2014-08-19T16:50:06.023,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/741. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.023,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",741,active,0} [ns_server:debug,2014-08-19T16:50:06.029,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,20452}, tap_estimate, {replica_building,"default",722,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22741.0>, <<"replication_building_722_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.046,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,36966}, tap_estimate, {replica_building,"default",722,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21566.0>, <<"replication_building_722_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.046,ns_1@10.242.238.88:<0.1612.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21566.0>}, {'ns_1@10.242.238.91',<18126.22741.0>}]) [rebalance:info,2014-08-19T16:50:06.046,ns_1@10.242.238.88:<0.1603.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:06.047,ns_1@10.242.238.88:<0.1603.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 722 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.047,ns_1@10.242.238.88:<0.1603.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.048,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.052,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.052,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1624.1>) [ns_server:debug,2014-08-19T16:50:06.052,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 466) [ns_server:debug,2014-08-19T16:50:06.052,ns_1@10.242.238.88:<0.1625.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.052,ns_1@10.242.238.88:<0.1625.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:06.053,ns_1@10.242.238.88:<0.1624.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 466 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.053,ns_1@10.242.238.88:<0.1630.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 466 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.053,ns_1@10.242.238.88:<0.1631.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 466 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.056,ns_1@10.242.238.88:<0.1632.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 466 into 'ns_1@10.242.238.91' is <18126.22746.0> [ns_server:debug,2014-08-19T16:50:06.059,ns_1@10.242.238.88:<0.1632.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 466 into 'ns_1@10.242.238.89' is <18124.26446.0> [rebalance:debug,2014-08-19T16:50:06.059,ns_1@10.242.238.88:<0.1624.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 466 is <0.1632.1> [ns_server:debug,2014-08-19T16:50:06.091,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,82920}, tap_estimate, {replica_building,"default",466,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22746.0>, <<"replication_building_466_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.107,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,98433}, tap_estimate, {replica_building,"default",466,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26446.0>, <<"replication_building_466_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:06.107,ns_1@10.242.238.88:<0.1633.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26446.0>}, {'ns_1@10.242.238.91',<18126.22746.0>}]) [rebalance:info,2014-08-19T16:50:06.107,ns_1@10.242.238.88:<0.1624.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:06.108,ns_1@10.242.238.88:<0.1624.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 466 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.108,ns_1@10.242.238.88:<0.1624.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.109,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.113,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:06.113,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1653.1>) [ns_server:debug,2014-08-19T16:50:06.113,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 975) [ns_server:debug,2014-08-19T16:50:06.114,ns_1@10.242.238.88:<0.1656.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.114,ns_1@10.242.238.88:<0.1656.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:06.114,ns_1@10.242.238.88:<0.1653.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 975 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.114,ns_1@10.242.238.88:<0.1664.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 975 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.114,ns_1@10.242.238.88:<0.1665.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 975 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.118,ns_1@10.242.238.88:<0.1667.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 975 into 'ns_1@10.242.238.90' is <18125.21586.0> [ns_server:debug,2014-08-19T16:50:06.120,ns_1@10.242.238.88:<0.1667.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 975 into 'ns_1@10.242.238.91' is <18126.22765.0> [rebalance:debug,2014-08-19T16:50:06.120,ns_1@10.242.238.88:<0.1653.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 975 is <0.1667.1> [ns_server:debug,2014-08-19T16:50:06.154,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,145011}, tap_estimate, {replica_building,"default",975,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21586.0>, <<"replication_building_975_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.169,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,160912}, tap_estimate, {replica_building,"default",975,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22765.0>, <<"replication_building_975_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.170,ns_1@10.242.238.88:<0.1668.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22765.0>}, {'ns_1@10.242.238.90',<18125.21586.0>}]) [rebalance:info,2014-08-19T16:50:06.170,ns_1@10.242.238.88:<0.1653.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:06.171,ns_1@10.242.238.88:<0.1653.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 975 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.171,ns_1@10.242.238.88:<0.1653.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.172,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:06.176,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:06.176,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1680.1>) [ns_server:debug,2014-08-19T16:50:06.176,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 721) [ns_server:debug,2014-08-19T16:50:06.177,ns_1@10.242.238.88:<0.1681.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.177,ns_1@10.242.238.88:<0.1681.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:06.177,ns_1@10.242.238.88:<0.1680.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 721 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.177,ns_1@10.242.238.88:<0.1686.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 721 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.177,ns_1@10.242.238.88:<0.1687.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 721 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.180,ns_1@10.242.238.88:<0.1688.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 721 into 'ns_1@10.242.238.91' is <18126.22771.0> [ns_server:debug,2014-08-19T16:50:06.183,ns_1@10.242.238.88:<0.1688.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 721 into 'ns_1@10.242.238.90' is <18125.21591.0> [rebalance:debug,2014-08-19T16:50:06.183,ns_1@10.242.238.88:<0.1680.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 721 is <0.1688.1> [ns_server:debug,2014-08-19T16:50:06.198,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 739. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.198,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/739. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.198,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",739,active,0} [ns_server:debug,2014-08-19T16:50:06.200,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,960,896,832,768,466,402,338,274,947,883,819,764,700,636,572, 206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520, 218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920, 792,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868, 502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816, 761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164,892,398, 270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840, 474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788, 422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498, 370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757, 446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012,935, 807,752,624,130,986,858,492,364,909,781,726,598,232] [ns_server:debug,2014-08-19T16:50:06.216,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,207565}, tap_estimate, {replica_building,"default",721,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22771.0>, <<"replication_building_721_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.233,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,224751}, tap_estimate, {replica_building,"default",721,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21591.0>, <<"replication_building_721_'ns_1@10.242.238.90'">>} 
[ns_server:debug,2014-08-19T16:50:06.234,ns_1@10.242.238.88:<0.1689.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21591.0>}, {'ns_1@10.242.238.91',<18126.22771.0>}]) [rebalance:info,2014-08-19T16:50:06.234,ns_1@10.242.238.88:<0.1680.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:06.235,ns_1@10.242.238.88:<0.1680.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 721 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.235,ns_1@10.242.238.88:<0.1680.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.236,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.240,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.240,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1701.1>) [ns_server:debug,2014-08-19T16:50:06.240,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 465) [ns_server:debug,2014-08-19T16:50:06.240,ns_1@10.242.238.88:<0.1702.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.241,ns_1@10.242.238.88:<0.1702.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:06.241,ns_1@10.242.238.88:<0.1701.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 465 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.241,ns_1@10.242.238.88:<0.1707.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 465 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.241,ns_1@10.242.238.88:<0.1708.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 465 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.245,ns_1@10.242.238.88:<0.1709.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 465 into 'ns_1@10.242.238.91' is <18126.22790.0> [ns_server:debug,2014-08-19T16:50:06.247,ns_1@10.242.238.88:<0.1709.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 465 into 'ns_1@10.242.238.89' is <18124.26466.0> [rebalance:debug,2014-08-19T16:50:06.247,ns_1@10.242.238.88:<0.1701.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 465 is <0.1709.1> [views:debug,2014-08-19T16:50:06.267,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/739. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",739,active,0} [ns_server:debug,2014-08-19T16:50:06.281,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,272460}, tap_estimate, {replica_building,"default",465,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22790.0>, <<"replication_building_465_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.299,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,290182}, tap_estimate, {replica_building,"default",465,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26466.0>, <<"replication_building_465_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:06.299,ns_1@10.242.238.88:<0.1710.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26466.0>}, {'ns_1@10.242.238.91',<18126.22790.0>}]) [rebalance:info,2014-08-19T16:50:06.299,ns_1@10.242.238.88:<0.1701.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:06.300,ns_1@10.242.238.88:<0.1701.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 465 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.300,ns_1@10.242.238.88:<0.1701.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.301,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.307,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:06.307,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1730.1>) [ns_server:debug,2014-08-19T16:50:06.308,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 974) [ns_server:debug,2014-08-19T16:50:06.308,ns_1@10.242.238.88:<0.1731.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.308,ns_1@10.242.238.88:<0.1731.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:06.308,ns_1@10.242.238.88:<0.1730.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 974 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.308,ns_1@10.242.238.88:<0.1736.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 974 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.309,ns_1@10.242.238.88:<0.1737.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 974 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.313,ns_1@10.242.238.88:<0.1744.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 974 into 'ns_1@10.242.238.90' is <18125.21611.0> [ns_server:debug,2014-08-19T16:50:06.315,ns_1@10.242.238.88:<0.1744.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 974 into 'ns_1@10.242.238.91' is <18126.22795.0> [rebalance:debug,2014-08-19T16:50:06.315,ns_1@10.242.238.88:<0.1730.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 974 is <0.1744.1> [ns_server:debug,2014-08-19T16:50:06.343,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 737. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.343,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/737. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.344,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,960,896,832,768,466,402,338,274,947,883,819,764,700,636,572, 206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520, 218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920, 792,737,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996, 868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944, 816,761,450,322,995,867,684,556,190,918,790,424,296,969,841,658,530,164,892, 398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968, 
840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916, 788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864, 498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812, 757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888,394, 266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836, 470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012, 935,807,752,624,130,986,858,492,364,909,781,726,598,232] [ns_server:debug,2014-08-19T16:50:06.345,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",737,active,0} [ns_server:debug,2014-08-19T16:50:06.350,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,341200}, tap_estimate, {replica_building,"default",974,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21611.0>, <<"replication_building_974_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.366,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,357247}, tap_estimate, {replica_building,"default",974,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22795.0>, <<"replication_building_974_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.366,ns_1@10.242.238.88:<0.1745.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22795.0>}, {'ns_1@10.242.238.90',<18125.21611.0>}]) [rebalance:info,2014-08-19T16:50:06.366,ns_1@10.242.238.88:<0.1730.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:06.367,ns_1@10.242.238.88:<0.1730.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 974 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.367,ns_1@10.242.238.88:<0.1730.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.368,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:06.372,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.372,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1760.1>) [ns_server:debug,2014-08-19T16:50:06.373,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 720) [ns_server:debug,2014-08-19T16:50:06.373,ns_1@10.242.238.88:<0.1761.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.373,ns_1@10.242.238.88:<0.1761.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:06.373,ns_1@10.242.238.88:<0.1760.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 720 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.373,ns_1@10.242.238.88:<0.1766.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 720 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.374,ns_1@10.242.238.88:<0.1767.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 720 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:50:06.377,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/737. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.377,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",737,active,0} [ns_server:debug,2014-08-19T16:50:06.377,ns_1@10.242.238.88:<0.1773.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 720 into 'ns_1@10.242.238.91' is <18126.22801.0> [ns_server:debug,2014-08-19T16:50:06.379,ns_1@10.242.238.88:<0.1773.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 720 into 'ns_1@10.242.238.90' is <18125.21616.0> [rebalance:debug,2014-08-19T16:50:06.379,ns_1@10.242.238.88:<0.1760.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 720 is <0.1773.1> [ns_server:debug,2014-08-19T16:50:06.413,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,404024}, tap_estimate, {replica_building,"default",720,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22801.0>, <<"replication_building_720_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.427,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,418902}, tap_estimate, {replica_building,"default",720,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21616.0>, <<"replication_building_720_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.428,ns_1@10.242.238.88:<0.1774.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21616.0>}, {'ns_1@10.242.238.91',<18126.22801.0>}]) [rebalance:info,2014-08-19T16:50:06.428,ns_1@10.242.238.88:<0.1760.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:06.429,ns_1@10.242.238.88:<0.1760.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 720 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.429,ns_1@10.242.238.88:<0.1760.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.430,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:50:06.435,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.435,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1802.1>) [ns_server:debug,2014-08-19T16:50:06.436,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 464) [ns_server:debug,2014-08-19T16:50:06.436,ns_1@10.242.238.88:<0.1803.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.436,ns_1@10.242.238.88:<0.1803.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:06.436,ns_1@10.242.238.88:<0.1802.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 464 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.436,ns_1@10.242.238.88:<0.1808.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 464 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.437,ns_1@10.242.238.88:<0.1809.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 464 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.440,ns_1@10.242.238.88:<0.1810.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 464 into 'ns_1@10.242.238.91' is <18126.22820.0> [ns_server:debug,2014-08-19T16:50:06.443,ns_1@10.242.238.88:<0.1810.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 464 into 'ns_1@10.242.238.89' is <18124.26486.0> [rebalance:debug,2014-08-19T16:50:06.443,ns_1@10.242.238.88:<0.1802.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 464 is <0.1810.1> [ns_server:debug,2014-08-19T16:50:06.452,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 735. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.452,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/735. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.452,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",735,active,0} [ns_server:debug,2014-08-19T16:50:06.453,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,960,896,832,768,466,402,338,274,947,883,819,764,700,636,572, 206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520, 218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920, 792,737,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996, 868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944, 816,761,450,322,995,867,684,556,190,918,790,735,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912, 784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262, 1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232] [ns_server:debug,2014-08-19T16:50:06.477,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,468188}, tap_estimate, {replica_building,"default",464,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22820.0>, <<"replication_building_464_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:50:06.486,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/735. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",735,active,0} [ns_server:debug,2014-08-19T16:50:06.493,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,484294}, tap_estimate, {replica_building,"default",464,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26486.0>, <<"replication_building_464_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:06.493,ns_1@10.242.238.88:<0.1811.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26486.0>}, {'ns_1@10.242.238.91',<18126.22820.0>}]) [rebalance:info,2014-08-19T16:50:06.494,ns_1@10.242.238.88:<0.1802.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:06.494,ns_1@10.242.238.88:<0.1802.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 464 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.495,ns_1@10.242.238.88:<0.1802.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.495,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.499,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:06.499,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1823.1>) [ns_server:debug,2014-08-19T16:50:06.500,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 973) [ns_server:debug,2014-08-19T16:50:06.500,ns_1@10.242.238.88:<0.1824.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.500,ns_1@10.242.238.88:<0.1824.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:06.500,ns_1@10.242.238.88:<0.1823.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 973 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.500,ns_1@10.242.238.88:<0.1829.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 973 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.500,ns_1@10.242.238.88:<0.1830.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 973 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.505,ns_1@10.242.238.88:<0.1831.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 973 into 'ns_1@10.242.238.90' is <18125.21636.0> [ns_server:debug,2014-08-19T16:50:06.507,ns_1@10.242.238.88:<0.1831.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 973 into 'ns_1@10.242.238.91' is <18126.22825.0> [rebalance:debug,2014-08-19T16:50:06.507,ns_1@10.242.238.88:<0.1823.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 973 is <0.1831.1> [ns_server:debug,2014-08-19T16:50:06.542,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,533327}, tap_estimate, {replica_building,"default",973,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21636.0>, <<"replication_building_973_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.557,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,548235}, tap_estimate, {replica_building,"default",973,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22825.0>, <<"replication_building_973_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.557,ns_1@10.242.238.88:<0.1832.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22825.0>}, {'ns_1@10.242.238.90',<18125.21636.0>}]) [rebalance:info,2014-08-19T16:50:06.557,ns_1@10.242.238.88:<0.1823.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:06.558,ns_1@10.242.238.88:<0.1823.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 973 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.558,ns_1@10.242.238.88:<0.1823.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.559,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:06.561,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 733. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.561,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/733. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.561,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",733,active,0} [ns_server:debug,2014-08-19T16:50:06.562,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,960,896,832,768,466,402,338,274,947,883,819,764,700,636,572, 206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520, 218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920, 792,737,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996, 868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944, 816,761,450,322,995,867,684,556,190,918,790,735,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,733,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136, 992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,757,446,318,991,863,680,552,186,914,786,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184, 912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988, 860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808, 753,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390, 262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232] [ns_server:debug,2014-08-19T16:50:06.563,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.563,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1858.1>) [ns_server:debug,2014-08-19T16:50:06.564,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 719) 
[ns_server:debug,2014-08-19T16:50:06.564,ns_1@10.242.238.88:<0.1859.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.565,ns_1@10.242.238.88:<0.1859.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:06.565,ns_1@10.242.238.88:<0.1858.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 719 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.565,ns_1@10.242.238.88:<0.1864.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 719 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.565,ns_1@10.242.238.88:<0.1865.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 719 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.569,ns_1@10.242.238.88:<0.1866.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 719 into 'ns_1@10.242.238.91' is <18126.22845.0> [ns_server:debug,2014-08-19T16:50:06.570,ns_1@10.242.238.88:<0.1866.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 719 into 'ns_1@10.242.238.90' is <18125.21656.0> [rebalance:debug,2014-08-19T16:50:06.571,ns_1@10.242.238.88:<0.1858.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 719 is <0.1866.1> [ns_server:debug,2014-08-19T16:50:06.607,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,598152}, tap_estimate, {replica_building,"default",719,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22845.0>, <<"replication_building_719_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.619,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,610914}, tap_estimate, {replica_building,"default",719,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21656.0>, <<"replication_building_719_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:50:06.620,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/733. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.620,ns_1@10.242.238.88:<0.1867.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21656.0>}, {'ns_1@10.242.238.91',<18126.22845.0>}]) [ns_server:debug,2014-08-19T16:50:06.620,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",733,active,0} [rebalance:info,2014-08-19T16:50:06.620,ns_1@10.242.238.88:<0.1858.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:06.621,ns_1@10.242.238.88:<0.1858.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 719 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.621,ns_1@10.242.238.88:<0.1858.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.622,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.626,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.626,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1880.1>) [ns_server:debug,2014-08-19T16:50:06.626,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 463) [ns_server:debug,2014-08-19T16:50:06.627,ns_1@10.242.238.88:<0.1881.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.627,ns_1@10.242.238.88:<0.1881.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:06.627,ns_1@10.242.238.88:<0.1880.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 463 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.627,ns_1@10.242.238.88:<0.1886.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 463 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.627,ns_1@10.242.238.88:<0.1887.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 463 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.631,ns_1@10.242.238.88:<0.1888.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 463 into 'ns_1@10.242.238.91' is <18126.22866.0> [ns_server:debug,2014-08-19T16:50:06.634,ns_1@10.242.238.88:<0.1888.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 463 into 'ns_1@10.242.238.89' is <18124.26506.0> [rebalance:debug,2014-08-19T16:50:06.634,ns_1@10.242.238.88:<0.1880.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 463 is <0.1888.1> [ns_server:debug,2014-08-19T16:50:06.668,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,659067}, tap_estimate, {replica_building,"default",463,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22866.0>, <<"replication_building_463_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.682,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,673112}, tap_estimate, {replica_building,"default",463,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26506.0>, <<"replication_building_463_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:06.682,ns_1@10.242.238.88:<0.1889.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26506.0>}, {'ns_1@10.242.238.91',<18126.22866.0>}]) [rebalance:info,2014-08-19T16:50:06.682,ns_1@10.242.238.88:<0.1880.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:06.683,ns_1@10.242.238.88:<0.1880.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 463 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.683,ns_1@10.242.238.88:<0.1880.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.684,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.688,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:06.688,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1915.1>) [ns_server:debug,2014-08-19T16:50:06.688,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 972) [ns_server:debug,2014-08-19T16:50:06.688,ns_1@10.242.238.88:<0.1916.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.689,ns_1@10.242.238.88:<0.1916.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:06.689,ns_1@10.242.238.88:<0.1915.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 972 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.689,ns_1@10.242.238.88:<0.1921.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 972 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.689,ns_1@10.242.238.88:<0.1922.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 972 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.693,ns_1@10.242.238.88:<0.1923.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 972 into 'ns_1@10.242.238.90' is <18125.21676.0> [ns_server:debug,2014-08-19T16:50:06.695,ns_1@10.242.238.88:<0.1923.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 972 into 'ns_1@10.242.238.91' is <18126.22871.0> [rebalance:debug,2014-08-19T16:50:06.695,ns_1@10.242.238.88:<0.1915.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 972 is <0.1923.1> [ns_server:debug,2014-08-19T16:50:06.721,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 731. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.721,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/731. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.721,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",731,active,0} [ns_server:debug,2014-08-19T16:50:06.723,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,960,896,832,768,466,402,338,274,947,883,819,764,700,636,572, 206,142,1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610, 546,244,180,116,972,908,844,780,478,414,350,286,959,895,831,712,648,584,520, 218,154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920, 792,737,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996, 868,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944, 816,761,450,322,995,867,684,556,190,918,790,735,424,296,969,841,658,530,164, 892,398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112, 968,840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188, 916,788,733,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136, 992,864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017, 940,812,757,446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526, 160,888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236, 108,964,836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550, 184,912,784,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132, 988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936, 808,753,442,314,987,859,676,548,182,910,782,416,288,961,833,650,522,156,884, 390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232] [ns_server:debug,2014-08-19T16:50:06.729,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,720367}, tap_estimate, {replica_building,"default",972,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21676.0>, <<"replication_building_972_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.747,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,738413}, tap_estimate, {replica_building,"default",972,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22871.0>, <<"replication_building_972_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:06.747,ns_1@10.242.238.88:<0.1924.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22871.0>}, {'ns_1@10.242.238.90',<18125.21676.0>}]) [rebalance:info,2014-08-19T16:50:06.748,ns_1@10.242.238.88:<0.1915.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:06.748,ns_1@10.242.238.88:<0.1915.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 972 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.749,ns_1@10.242.238.88:<0.1915.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.750,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:06.754,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.754,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.1936.1>) [ns_server:debug,2014-08-19T16:50:06.754,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 718) [ns_server:debug,2014-08-19T16:50:06.755,ns_1@10.242.238.88:<0.1937.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.755,ns_1@10.242.238.88:<0.1937.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:06.755,ns_1@10.242.238.88:<0.1936.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 718 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.755,ns_1@10.242.238.88:<0.1942.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 718 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.755,ns_1@10.242.238.88:<0.1943.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 718 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:50:06.756,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/731. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.756,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",731,active,0} [ns_server:debug,2014-08-19T16:50:06.759,ns_1@10.242.238.88:<0.1944.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 718 into 'ns_1@10.242.238.91' is <18126.22891.0> [ns_server:debug,2014-08-19T16:50:06.762,ns_1@10.242.238.88:<0.1944.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 718 into 'ns_1@10.242.238.90' is <18125.21681.0> [rebalance:debug,2014-08-19T16:50:06.762,ns_1@10.242.238.88:<0.1936.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 718 is <0.1944.1> [ns_server:debug,2014-08-19T16:50:06.797,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,788381}, tap_estimate, {replica_building,"default",718,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22891.0>, <<"replication_building_718_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.811,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,802238}, tap_estimate, {replica_building,"default",718,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21681.0>, <<"replication_building_718_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.811,ns_1@10.242.238.88:<0.1945.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21681.0>}, {'ns_1@10.242.238.91',<18126.22891.0>}]) [rebalance:info,2014-08-19T16:50:06.811,ns_1@10.242.238.88:<0.1936.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:06.812,ns_1@10.242.238.88:<0.1936.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 718 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.813,ns_1@10.242.238.88:<0.1936.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.813,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.817,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.817,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.1957.1>) [ns_server:debug,2014-08-19T16:50:06.817,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 462) [ns_server:debug,2014-08-19T16:50:06.818,ns_1@10.242.238.88:<0.1958.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.818,ns_1@10.242.238.88:<0.1958.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:06.818,ns_1@10.242.238.88:<0.1957.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 462 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.818,ns_1@10.242.238.88:<0.1963.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 462 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.818,ns_1@10.242.238.88:<0.1964.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 462 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.822,ns_1@10.242.238.88:<0.1965.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 462 into 'ns_1@10.242.238.91' is <18126.22896.0> [ns_server:debug,2014-08-19T16:50:06.824,ns_1@10.242.238.88:<0.1965.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 462 into 'ns_1@10.242.238.89' is <18124.26526.0> [rebalance:debug,2014-08-19T16:50:06.824,ns_1@10.242.238.88:<0.1957.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 462 is <0.1965.1> [rebalance:info,2014-08-19T16:50:06.857,ns_1@10.242.238.88:<0.1802.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 464 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:06.858,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 464 state to active [ns_server:debug,2014-08-19T16:50:06.858,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,849361}, tap_estimate, {replica_building,"default",462,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22896.0>, <<"replication_building_462_'ns_1@10.242.238.91'">>} [rebalance:info,2014-08-19T16:50:06.859,ns_1@10.242.238.88:<0.1802.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 464 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.859,ns_1@10.242.238.88:<0.1802.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.865,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 729. Nacking mccouch update. [views:debug,2014-08-19T16:50:06.865,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/729. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.865,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",729,active,0} [ns_server:debug,2014-08-19T16:50:06.867,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,896,768,402,274,947,883,819,764,700,636,572,206,142,1011,998, 934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180,116, 972,908,844,780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946, 882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971, 843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502,374,919,791, 736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995, 867,684,556,190,918,790,735,424,296,969,841,658,530,164,892,398,270,1020,943, 815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891, 708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294, 967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266,1016, 939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,416,288,961,833,650,522,156,884,390,262,1012,935, 807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338] [ns_server:debug,2014-08-19T16:50:06.873,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,864377}, tap_estimate, {replica_building,"default",462,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26526.0>, <<"replication_building_462_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:06.873,ns_1@10.242.238.88:<0.1966.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26526.0>}, {'ns_1@10.242.238.91',<18126.22896.0>}]) [rebalance:info,2014-08-19T16:50:06.874,ns_1@10.242.238.88:<0.1957.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:06.874,ns_1@10.242.238.88:<0.1957.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 462 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.875,ns_1@10.242.238.88:<0.1957.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.875,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:06.879,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:06.879,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.1996.1>) [ns_server:debug,2014-08-19T16:50:06.879,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 971) [ns_server:debug,2014-08-19T16:50:06.880,ns_1@10.242.238.88:<0.1997.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.880,ns_1@10.242.238.88:<0.1997.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:06.880,ns_1@10.242.238.88:<0.1996.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 971 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.880,ns_1@10.242.238.88:<0.2002.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 971 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.880,ns_1@10.242.238.88:<0.2003.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 971 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.884,ns_1@10.242.238.88:<0.2004.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 971 into 'ns_1@10.242.238.90' is <18125.21701.0> [ns_server:debug,2014-08-19T16:50:06.887,ns_1@10.242.238.88:<0.2004.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 971 into 'ns_1@10.242.238.91' is <18126.22904.0> [rebalance:debug,2014-08-19T16:50:06.887,ns_1@10.242.238.88:<0.1996.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 971 is <0.2004.1> [views:debug,2014-08-19T16:50:06.916,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/729. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:06.916,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",729,active,0} [ns_server:debug,2014-08-19T16:50:06.920,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,911341}, tap_estimate, {replica_building,"default",971,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21701.0>, <<"replication_building_971_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:06.936,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,927827}, tap_estimate, {replica_building,"default",971,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22904.0>, <<"replication_building_971_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:06.937,ns_1@10.242.238.88:<0.2005.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22904.0>}, {'ns_1@10.242.238.90',<18125.21701.0>}]) [rebalance:info,2014-08-19T16:50:06.937,ns_1@10.242.238.88:<0.1996.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:06.938,ns_1@10.242.238.88:<0.1996.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 971 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:06.938,ns_1@10.242.238.88:<0.1996.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:06.939,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:06.943,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:06.943,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2017.1>) [ns_server:debug,2014-08-19T16:50:06.943,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 717) [ns_server:debug,2014-08-19T16:50:06.943,ns_1@10.242.238.88:<0.2018.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:06.944,ns_1@10.242.238.88:<0.2018.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:06.944,ns_1@10.242.238.88:<0.2017.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 717 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:06.944,ns_1@10.242.238.88:<0.2023.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 717 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:06.944,ns_1@10.242.238.88:<0.2024.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 717 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:06.947,ns_1@10.242.238.88:<0.2025.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 717 into 'ns_1@10.242.238.91' is <18126.22924.0> [ns_server:debug,2014-08-19T16:50:06.950,ns_1@10.242.238.88:<0.2025.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 717 into 'ns_1@10.242.238.90' is <18125.21706.0> [rebalance:debug,2014-08-19T16:50:06.950,ns_1@10.242.238.88:<0.2017.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 717 is <0.2025.1> [ns_server:debug,2014-08-19T16:50:06.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,976166}, tap_estimate, {replica_building,"default",717,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22924.0>, <<"replication_building_717_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.000,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452606,991299}, tap_estimate, {replica_building,"default",717,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21706.0>, <<"replication_building_717_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.000,ns_1@10.242.238.88:<0.2026.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21706.0>}, {'ns_1@10.242.238.91',<18126.22924.0>}]) [rebalance:info,2014-08-19T16:50:07.001,ns_1@10.242.238.88:<0.2017.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:07.001,ns_1@10.242.238.88:<0.2017.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 717 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.002,ns_1@10.242.238.88:<0.2017.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.002,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.006,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.006,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2052.1>) [ns_server:debug,2014-08-19T16:50:07.007,ns_1@10.242.238.88:<0.2053.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for 
inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.007,ns_1@10.242.238.88:<0.2053.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:07.007,ns_1@10.242.238.88:<0.2052.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 461 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [ns_server:debug,2014-08-19T16:50:07.007,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 727. Nacking mccouch update. [rebalance:info,2014-08-19T16:50:07.007,ns_1@10.242.238.88:<0.2058.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 461 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.007,ns_1@10.242.238.88:<0.2059.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 461 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [views:debug,2014-08-19T16:50:07.008,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/727. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.008,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",727,active,0} [ns_server:debug,2014-08-19T16:50:07.008,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 461) [ns_server:debug,2014-08-19T16:50:07.010,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,896,768,402,274,947,883,819,764,700,636,572,206,142,1011,998, 934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180,116, 972,908,844,780,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946, 882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971, 843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502,374,919,791, 736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995, 867,684,556,190,918,790,735,424,296,969,841,658,530,164,892,398,270,1020,943, 815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891, 708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294, 
967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266,1016, 939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262,1012, 935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338] [ns_server:debug,2014-08-19T16:50:07.012,ns_1@10.242.238.88:<0.2060.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 461 into 'ns_1@10.242.238.91' is <18126.22929.0> [ns_server:debug,2014-08-19T16:50:07.015,ns_1@10.242.238.88:<0.2060.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 461 into 'ns_1@10.242.238.89' is <18124.26550.0> [rebalance:debug,2014-08-19T16:50:07.015,ns_1@10.242.238.88:<0.2052.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 461 is <0.2060.1> [views:debug,2014-08-19T16:50:07.041,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/727. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.042,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",727,active,0} [ns_server:debug,2014-08-19T16:50:07.050,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,41486}, tap_estimate, {replica_building,"default",461,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22929.0>, <<"replication_building_461_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.067,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,58608}, tap_estimate, {replica_building,"default",461,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26550.0>, <<"replication_building_461_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:07.068,ns_1@10.242.238.88:<0.2061.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26550.0>}, {'ns_1@10.242.238.91',<18126.22929.0>}]) [rebalance:info,2014-08-19T16:50:07.068,ns_1@10.242.238.88:<0.2052.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:07.068,ns_1@10.242.238.88:<0.2052.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 461 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.069,ns_1@10.242.238.88:<0.2052.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.069,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.075,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:07.076,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2073.1>) [ns_server:debug,2014-08-19T16:50:07.076,ns_1@10.242.238.88:<0.2074.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.076,ns_1@10.242.238.88:<0.2074.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:07.076,ns_1@10.242.238.88:<0.2073.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 970 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.077,ns_1@10.242.238.88:<0.2079.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 970 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.077,ns_1@10.242.238.88:<0.2080.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 970 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.079,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 970) [ns_server:debug,2014-08-19T16:50:07.081,ns_1@10.242.238.88:<0.2081.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 970 into 'ns_1@10.242.238.90' is <18125.21726.0> [ns_server:debug,2014-08-19T16:50:07.083,ns_1@10.242.238.88:<0.2081.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 970 into 'ns_1@10.242.238.91' is <18126.22934.0> [rebalance:debug,2014-08-19T16:50:07.083,ns_1@10.242.238.88:<0.2073.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 970 is <0.2081.1> [ns_server:debug,2014-08-19T16:50:07.116,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,107562}, tap_estimate, {replica_building,"default",970,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21726.0>, <<"replication_building_970_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.132,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,123014}, tap_estimate, {replica_building,"default",970,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22934.0>, <<"replication_building_970_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.132,ns_1@10.242.238.88:<0.2082.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22934.0>}, {'ns_1@10.242.238.90',<18125.21726.0>}]) [rebalance:info,2014-08-19T16:50:07.132,ns_1@10.242.238.88:<0.2073.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:07.133,ns_1@10.242.238.88:<0.2073.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 970 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.133,ns_1@10.242.238.88:<0.2073.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:07.134,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:07.138,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.138,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2108.1>) [ns_server:debug,2014-08-19T16:50:07.138,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 716) [ns_server:debug,2014-08-19T16:50:07.138,ns_1@10.242.238.88:<0.2109.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.138,ns_1@10.242.238.88:<0.2109.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:07.139,ns_1@10.242.238.88:<0.2108.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 716 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.139,ns_1@10.242.238.88:<0.2114.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 716 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.139,ns_1@10.242.238.88:<0.2115.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 716 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.142,ns_1@10.242.238.88:<0.2116.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 716 into 'ns_1@10.242.238.91' is <18126.22954.0> [ns_server:debug,2014-08-19T16:50:07.145,ns_1@10.242.238.88:<0.2116.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 716 into 'ns_1@10.242.238.90' is <18125.21731.0> [rebalance:debug,2014-08-19T16:50:07.145,ns_1@10.242.238.88:<0.2108.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 716 is <0.2116.1> [ns_server:debug,2014-08-19T16:50:07.179,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,170924}, tap_estimate, {replica_building,"default",716,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22954.0>, <<"replication_building_716_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 725. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.192,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/725. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",725,active,0} [ns_server:debug,2014-08-19T16:50:07.194,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,412,284,957, 829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360,905, 777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929,801, 746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953,825, 642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901,773, 718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977,849, 666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797,742, 614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873,690, 562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949,821, 766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769, 714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973, 845,662,534,168,896,768,402,274,947,883,819,764,700,636,572,206,142,1011,998, 934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180,116, 972,908,844,780,725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023, 946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298, 971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502,374,919, 791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322, 995,867,684,556,190,918,790,735,424,296,969,841,658,530,164,892,398,270,1020, 943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346, 891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422, 294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498,370, 915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446, 318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262, 1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338] [ns_server:debug,2014-08-19T16:50:07.195,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,186545}, tap_estimate, {replica_building,"default",716,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21731.0>, <<"replication_building_716_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.196,ns_1@10.242.238.88:<0.2117.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21731.0>}, {'ns_1@10.242.238.91',<18126.22954.0>}]) [rebalance:info,2014-08-19T16:50:07.196,ns_1@10.242.238.88:<0.2108.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call 
for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:07.196,ns_1@10.242.238.88:<0.2108.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 716 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.197,ns_1@10.242.238.88:<0.2108.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.197,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.201,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.201,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2134.1>) [ns_server:debug,2014-08-19T16:50:07.202,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 460) [ns_server:debug,2014-08-19T16:50:07.202,ns_1@10.242.238.88:<0.2135.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.202,ns_1@10.242.238.88:<0.2135.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:07.202,ns_1@10.242.238.88:<0.2134.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 460 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.202,ns_1@10.242.238.88:<0.2140.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 460 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.202,ns_1@10.242.238.88:<0.2141.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 460 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.207,ns_1@10.242.238.88:<0.2142.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 460 into 'ns_1@10.242.238.91' is <18126.22959.0> [ns_server:debug,2014-08-19T16:50:07.210,ns_1@10.242.238.88:<0.2142.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 460 into 'ns_1@10.242.238.89' is <18124.26570.0> [rebalance:debug,2014-08-19T16:50:07.210,ns_1@10.242.238.88:<0.2134.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 460 is <0.2142.1> [views:debug,2014-08-19T16:50:07.225,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/725. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.226,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",725,active,0} [ns_server:debug,2014-08-19T16:50:07.244,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,235609}, tap_estimate, {replica_building,"default",460,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22959.0>, <<"replication_building_460_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.259,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,250840}, tap_estimate, {replica_building,"default",460,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26570.0>, <<"replication_building_460_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:07.260,ns_1@10.242.238.88:<0.2143.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26570.0>}, {'ns_1@10.242.238.91',<18126.22959.0>}]) [rebalance:info,2014-08-19T16:50:07.260,ns_1@10.242.238.88:<0.2134.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:07.261,ns_1@10.242.238.88:<0.2134.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 460 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.261,ns_1@10.242.238.88:<0.2134.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.262,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.266,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:07.266,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2155.1>) [ns_server:debug,2014-08-19T16:50:07.266,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 969) [ns_server:debug,2014-08-19T16:50:07.266,ns_1@10.242.238.88:<0.2156.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.266,ns_1@10.242.238.88:<0.2156.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:07.266,ns_1@10.242.238.88:<0.2155.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 969 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.267,ns_1@10.242.238.88:<0.2161.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 969 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.267,ns_1@10.242.238.88:<0.2162.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 969 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.272,ns_1@10.242.238.88:<0.2163.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 969 into 'ns_1@10.242.238.90' is <18125.21751.0> [ns_server:debug,2014-08-19T16:50:07.274,ns_1@10.242.238.88:<0.2163.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 969 into 'ns_1@10.242.238.91' is <18126.22978.0> [rebalance:debug,2014-08-19T16:50:07.274,ns_1@10.242.238.88:<0.2155.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 969 is <0.2163.1> [ns_server:debug,2014-08-19T16:50:07.307,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,298608}, tap_estimate, {replica_building,"default",969,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21751.0>, <<"replication_building_969_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.326,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 723. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.326,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/723. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.326,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",723,active,0} [ns_server:debug,2014-08-19T16:50:07.327,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,318058}, tap_estimate, {replica_building,"default",969,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22978.0>, <<"replication_building_969_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.327,ns_1@10.242.238.88:<0.2164.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.22978.0>}, {'ns_1@10.242.238.90',<18125.21751.0>}]) [rebalance:info,2014-08-19T16:50:07.327,ns_1@10.242.238.88:<0.2155.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:07.328,ns_1@10.242.238.88:<0.2155.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 969 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.328,ns_1@10.242.238.88:<0.2155.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.328,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,410,282,955,827,644,516,150,878,384,256,1006,929, 801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332,877, 694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280,953, 825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356,901, 773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304,977, 849,666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925,797, 742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328,873, 690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276,949, 821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352,897, 769,714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428,300, 973,845,662,534,168,896,768,402,274,947,883,819,764,700,636,572,206,142,1011, 998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180, 116,972,908,844,780,725,478,414,350,286,959,895,831,712,648,584,520,218,154, 1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737, 426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502, 374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761, 450,322,995,867,684,556,190,918,790,735,424,296,969,841,658,530,164,892,398, 270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840, 474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788, 733,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864, 498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812, 757,446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888, 
394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912, 784,729,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988, 860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808, 753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884, 390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832, 466,338] [ns_server:debug,2014-08-19T16:50:07.329,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:07.333,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.333,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2190.1>) [ns_server:debug,2014-08-19T16:50:07.333,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 715) [ns_server:debug,2014-08-19T16:50:07.333,ns_1@10.242.238.88:<0.2191.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.334,ns_1@10.242.238.88:<0.2191.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:07.334,ns_1@10.242.238.88:<0.2190.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 715 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.334,ns_1@10.242.238.88:<0.2196.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 715 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.334,ns_1@10.242.238.88:<0.2197.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 715 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.338,ns_1@10.242.238.88:<0.2198.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 715 into 'ns_1@10.242.238.91' is <18126.22984.0> [ns_server:debug,2014-08-19T16:50:07.340,ns_1@10.242.238.88:<0.2198.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 715 into 'ns_1@10.242.238.90' is <18125.21756.0> [rebalance:debug,2014-08-19T16:50:07.340,ns_1@10.242.238.88:<0.2190.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 715 is <0.2198.1> [views:debug,2014-08-19T16:50:07.359,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/723. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.360,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",723,active,0} [ns_server:debug,2014-08-19T16:50:07.373,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,364738}, tap_estimate, {replica_building,"default",715,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22984.0>, <<"replication_building_715_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.391,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,382863}, tap_estimate, {replica_building,"default",715,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21756.0>, <<"replication_building_715_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.392,ns_1@10.242.238.88:<0.2199.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21756.0>}, {'ns_1@10.242.238.91',<18126.22984.0>}]) [rebalance:info,2014-08-19T16:50:07.392,ns_1@10.242.238.88:<0.2190.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:07.393,ns_1@10.242.238.88:<0.2190.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 715 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.394,ns_1@10.242.238.88:<0.2190.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.394,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.398,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.398,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2219.1>) [ns_server:debug,2014-08-19T16:50:07.398,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 459) [ns_server:debug,2014-08-19T16:50:07.399,ns_1@10.242.238.88:<0.2220.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.399,ns_1@10.242.238.88:<0.2220.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:07.399,ns_1@10.242.238.88:<0.2219.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 459 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.399,ns_1@10.242.238.88:<0.2225.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 459 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.399,ns_1@10.242.238.88:<0.2226.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 459 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.403,ns_1@10.242.238.88:<0.2232.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 459 into 'ns_1@10.242.238.91' is <18126.22989.0> [ns_server:debug,2014-08-19T16:50:07.405,ns_1@10.242.238.88:<0.2232.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 459 into 'ns_1@10.242.238.89' is <18124.26590.0> [rebalance:debug,2014-08-19T16:50:07.405,ns_1@10.242.238.88:<0.2219.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 459 is <0.2232.1> [ns_server:debug,2014-08-19T16:50:07.440,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,431696}, tap_estimate, {replica_building,"default",459,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.22989.0>, <<"replication_building_459_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.447,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 721. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.447,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/721. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.447,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",721,active,0} [ns_server:debug,2014-08-19T16:50:07.448,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,408,280, 953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484,356, 901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432,304, 977,849,666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002,925, 797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456,328, 873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404,276, 949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480,352, 897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739,428, 300,973,845,662,534,168,896,768,402,274,947,883,819,764,700,636,572,206,142, 1011,998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244, 180,116,972,908,844,780,725,478,414,350,286,959,895,831,712,648,584,520,218, 154,1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792, 737,426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868, 502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816, 761,450,322,995,867,684,556,190,918,790,735,424,296,969,841,658,530,164,892, 398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968, 840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916, 788,733,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,757,446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184, 912,784,729,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132, 988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936, 808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156, 884,390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960, 832,466,338] [ns_server:debug,2014-08-19T16:50:07.456,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,447504}, tap_estimate, {replica_building,"default",459,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26590.0>, <<"replication_building_459_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:07.457,ns_1@10.242.238.88:<0.2234.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26590.0>}, {'ns_1@10.242.238.91',<18126.22989.0>}]) [rebalance:info,2014-08-19T16:50:07.457,ns_1@10.242.238.88:<0.2219.1>:janitor_agent:initiate_indexing:552]default: Doing 
initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:07.457,ns_1@10.242.238.88:<0.2219.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 459 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.458,ns_1@10.242.238.88:<0.2219.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.458,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.462,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:07.462,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2246.1>) [ns_server:debug,2014-08-19T16:50:07.463,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 968) [ns_server:debug,2014-08-19T16:50:07.463,ns_1@10.242.238.88:<0.2247.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.463,ns_1@10.242.238.88:<0.2247.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:07.463,ns_1@10.242.238.88:<0.2246.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 968 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.463,ns_1@10.242.238.88:<0.2252.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 968 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.463,ns_1@10.242.238.88:<0.2253.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 968 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.467,ns_1@10.242.238.88:<0.2254.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 968 into 'ns_1@10.242.238.90' is <18125.21768.0> [ns_server:debug,2014-08-19T16:50:07.468,ns_1@10.242.238.88:<0.2254.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 968 into 'ns_1@10.242.238.91' is <18126.23008.0> [rebalance:debug,2014-08-19T16:50:07.468,ns_1@10.242.238.88:<0.2246.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 968 is <0.2254.1> [ns_server:debug,2014-08-19T16:50:07.502,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,493826}, tap_estimate, {replica_building,"default",968,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21768.0>, <<"replication_building_968_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.519,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,510128}, tap_estimate, {replica_building,"default",968,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23008.0>, 
<<"replication_building_968_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.519,ns_1@10.242.238.88:<0.2255.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23008.0>}, {'ns_1@10.242.238.90',<18125.21768.0>}]) [rebalance:info,2014-08-19T16:50:07.519,ns_1@10.242.238.88:<0.2246.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:07.520,ns_1@10.242.238.88:<0.2246.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 968 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.520,ns_1@10.242.238.88:<0.2246.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.521,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:07.525,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.525,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2267.1>) [ns_server:debug,2014-08-19T16:50:07.525,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 714) [ns_server:debug,2014-08-19T16:50:07.526,ns_1@10.242.238.88:<0.2268.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.526,ns_1@10.242.238.88:<0.2268.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:07.526,ns_1@10.242.238.88:<0.2267.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 714 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.526,ns_1@10.242.238.88:<0.2273.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 714 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.526,ns_1@10.242.238.88:<0.2274.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 714 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.531,ns_1@10.242.238.88:<0.2275.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 714 into 'ns_1@10.242.238.91' is <18126.23014.0> [views:debug,2014-08-19T16:50:07.531,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/721. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.531,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",721,active,0} [ns_server:debug,2014-08-19T16:50:07.533,ns_1@10.242.238.88:<0.2275.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 714 into 'ns_1@10.242.238.90' is <18125.21787.0> [rebalance:debug,2014-08-19T16:50:07.533,ns_1@10.242.238.88:<0.2267.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 714 is <0.2275.1> [ns_server:debug,2014-08-19T16:50:07.568,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,559142}, tap_estimate, {replica_building,"default",714,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23014.0>, <<"replication_building_714_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.584,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,575291}, tap_estimate, {replica_building,"default",714,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21787.0>, <<"replication_building_714_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.584,ns_1@10.242.238.88:<0.2276.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21787.0>}, {'ns_1@10.242.238.91',<18126.23014.0>}]) [rebalance:info,2014-08-19T16:50:07.584,ns_1@10.242.238.88:<0.2267.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:07.585,ns_1@10.242.238.88:<0.2267.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 714 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.586,ns_1@10.242.238.88:<0.2267.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.586,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.590,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.590,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2288.1>) [ns_server:debug,2014-08-19T16:50:07.590,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 458) [ns_server:debug,2014-08-19T16:50:07.591,ns_1@10.242.238.88:<0.2289.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.591,ns_1@10.242.238.88:<0.2289.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:07.591,ns_1@10.242.238.88:<0.2288.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 458 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.591,ns_1@10.242.238.88:<0.2294.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 458 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.591,ns_1@10.242.238.88:<0.2295.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 458 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.594,ns_1@10.242.238.88:<0.2296.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 458 into 'ns_1@10.242.238.91' is <18126.23025.0> [ns_server:debug,2014-08-19T16:50:07.597,ns_1@10.242.238.88:<0.2296.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 458 into 'ns_1@10.242.238.89' is <18124.26610.0> [rebalance:debug,2014-08-19T16:50:07.597,ns_1@10.242.238.88:<0.2288.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 458 is <0.2296.1> [ns_server:debug,2014-08-19T16:50:07.631,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,622146}, tap_estimate, {replica_building,"default",458,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23025.0>, <<"replication_building_458_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.647,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,638042}, tap_estimate, {replica_building,"default",458,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26610.0>, <<"replication_building_458_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:07.647,ns_1@10.242.238.88:<0.2297.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26610.0>}, {'ns_1@10.242.238.91',<18126.23025.0>}]) [rebalance:info,2014-08-19T16:50:07.647,ns_1@10.242.238.88:<0.2288.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:07.648,ns_1@10.242.238.88:<0.2288.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 458 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.648,ns_1@10.242.238.88:<0.2288.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.649,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.653,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:07.653,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2323.1>) [ns_server:debug,2014-08-19T16:50:07.653,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 967) [ns_server:debug,2014-08-19T16:50:07.654,ns_1@10.242.238.88:<0.2324.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.654,ns_1@10.242.238.88:<0.2324.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:07.654,ns_1@10.242.238.88:<0.2323.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 967 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.654,ns_1@10.242.238.88:<0.2330.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 967 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [rebalance:info,2014-08-19T16:50:07.654,ns_1@10.242.238.88:<0.2329.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 967 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:50:07.658,ns_1@10.242.238.88:<0.2331.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 967 into 'ns_1@10.242.238.90' is <18125.21793.0> [ns_server:debug,2014-08-19T16:50:07.660,ns_1@10.242.238.88:<0.2331.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 967 into 'ns_1@10.242.238.91' is <18126.23038.0> [rebalance:debug,2014-08-19T16:50:07.660,ns_1@10.242.238.88:<0.2323.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 967 is <0.2331.1> [ns_server:debug,2014-08-19T16:50:07.689,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 719. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.689,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/719. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.689,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",719,active,0} [ns_server:debug,2014-08-19T16:50:07.691,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,406,278,951,823,640,512,146,874,508,380,1002, 925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767,456, 328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770,404, 276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846,480, 352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922,794,739, 428,300,973,845,662,534,168,896,768,402,274,947,819,764,636,142,998,934,870, 806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180,116,972,908, 844,780,725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882, 818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843, 660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736, 608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867, 684,556,190,918,790,735,424,296,969,841,658,530,164,892,398,270,1020,943,815, 760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708, 580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498,370,915,787, 732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266,1016,939, 811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987, 859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262,1012,935, 807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883,700, 572,206,1011] [ns_server:debug,2014-08-19T16:50:07.694,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,685569}, tap_estimate, {replica_building,"default",967,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21793.0>, <<"replication_building_967_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.711,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,702940}, tap_estimate, {replica_building,"default",967,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23038.0>, <<"replication_building_967_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:07.712,ns_1@10.242.238.88:<0.2332.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23038.0>}, {'ns_1@10.242.238.90',<18125.21793.0>}]) [rebalance:info,2014-08-19T16:50:07.712,ns_1@10.242.238.88:<0.2323.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:07.713,ns_1@10.242.238.88:<0.2323.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 967 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.713,ns_1@10.242.238.88:<0.2323.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.714,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:07.718,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.719,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2344.1>) [ns_server:debug,2014-08-19T16:50:07.719,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 713) [ns_server:debug,2014-08-19T16:50:07.719,ns_1@10.242.238.88:<0.2345.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.719,ns_1@10.242.238.88:<0.2345.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:07.719,ns_1@10.242.238.88:<0.2344.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 713 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.719,ns_1@10.242.238.88:<0.2350.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 713 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.719,ns_1@10.242.238.88:<0.2351.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 713 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.723,ns_1@10.242.238.88:<0.2352.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 713 into 'ns_1@10.242.238.91' is <18126.23058.0> [ns_server:debug,2014-08-19T16:50:07.726,ns_1@10.242.238.88:<0.2352.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 713 into 'ns_1@10.242.238.90' is <18125.21812.0> [rebalance:debug,2014-08-19T16:50:07.726,ns_1@10.242.238.88:<0.2344.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 713 is <0.2352.1> [views:debug,2014-08-19T16:50:07.748,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/719. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.748,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",719,active,0} [ns_server:debug,2014-08-19T16:50:07.759,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,750314}, tap_estimate, {replica_building,"default",713,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23058.0>, <<"replication_building_713_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.775,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,766378}, tap_estimate, {replica_building,"default",713,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21812.0>, <<"replication_building_713_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.775,ns_1@10.242.238.88:<0.2353.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21812.0>}, {'ns_1@10.242.238.91',<18126.23058.0>}]) [rebalance:info,2014-08-19T16:50:07.776,ns_1@10.242.238.88:<0.2344.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:07.777,ns_1@10.242.238.88:<0.2344.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 713 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.778,ns_1@10.242.238.88:<0.2344.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.778,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.782,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.783,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2365.1>) [ns_server:debug,2014-08-19T16:50:07.783,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 457) [ns_server:debug,2014-08-19T16:50:07.783,ns_1@10.242.238.88:<0.2366.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.783,ns_1@10.242.238.88:<0.2366.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:07.783,ns_1@10.242.238.88:<0.2365.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 457 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.784,ns_1@10.242.238.88:<0.2371.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 457 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.784,ns_1@10.242.238.88:<0.2372.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 457 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.788,ns_1@10.242.238.88:<0.2373.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 457 into 'ns_1@10.242.238.91' is <18126.23063.0> [ns_server:debug,2014-08-19T16:50:07.791,ns_1@10.242.238.88:<0.2373.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 457 into 'ns_1@10.242.238.89' is <18124.26616.0> [rebalance:debug,2014-08-19T16:50:07.791,ns_1@10.242.238.88:<0.2365.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 457 is <0.2373.1> [ns_server:debug,2014-08-19T16:50:07.826,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,817094}, tap_estimate, {replica_building,"default",457,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23063.0>, <<"replication_building_457_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.838,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,829480}, tap_estimate, {replica_building,"default",457,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26616.0>, <<"replication_building_457_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:07.839,ns_1@10.242.238.88:<0.2374.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26616.0>}, {'ns_1@10.242.238.91',<18126.23063.0>}]) [rebalance:info,2014-08-19T16:50:07.839,ns_1@10.242.238.88:<0.2365.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:07.839,ns_1@10.242.238.88:<0.2365.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 457 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.840,ns_1@10.242.238.88:<0.2365.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.841,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.844,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:07.845,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2400.1>) [ns_server:debug,2014-08-19T16:50:07.845,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 966) [ns_server:debug,2014-08-19T16:50:07.845,ns_1@10.242.238.88:<0.2401.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.845,ns_1@10.242.238.88:<0.2401.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:07.846,ns_1@10.242.238.88:<0.2400.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 966 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.846,ns_1@10.242.238.88:<0.2406.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 966 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.846,ns_1@10.242.238.88:<0.2407.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 966 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.851,ns_1@10.242.238.88:<0.2408.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 966 into 'ns_1@10.242.238.90' is <18125.21819.0> [ns_server:debug,2014-08-19T16:50:07.852,ns_1@10.242.238.88:<0.2408.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 966 into 'ns_1@10.242.238.91' is <18126.23068.0> [rebalance:debug,2014-08-19T16:50:07.853,ns_1@10.242.238.88:<0.2400.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 966 is <0.2408.1> [ns_server:debug,2014-08-19T16:50:07.887,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,878602}, tap_estimate, {replica_building,"default",966,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21819.0>, <<"replication_building_966_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:07.904,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,895071}, tap_estimate, {replica_building,"default",966,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23068.0>, <<"replication_building_966_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.904,ns_1@10.242.238.88:<0.2409.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23068.0>}, {'ns_1@10.242.238.90',<18125.21819.0>}]) [rebalance:info,2014-08-19T16:50:07.904,ns_1@10.242.238.88:<0.2400.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:07.905,ns_1@10.242.238.88:<0.2400.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 966 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:07.905,ns_1@10.242.238.88:<0.2400.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.906,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:07.910,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:07.910,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2421.1>) [ns_server:debug,2014-08-19T16:50:07.910,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 712) [ns_server:debug,2014-08-19T16:50:07.911,ns_1@10.242.238.88:<0.2422.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.911,ns_1@10.242.238.88:<0.2422.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:07.911,ns_1@10.242.238.88:<0.2421.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 712 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.911,ns_1@10.242.238.88:<0.2427.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 712 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.911,ns_1@10.242.238.88:<0.2428.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 712 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.915,ns_1@10.242.238.88:<0.2429.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 712 into 'ns_1@10.242.238.91' is <18126.23088.0> [ns_server:debug,2014-08-19T16:50:07.917,ns_1@10.242.238.88:<0.2429.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 712 into 'ns_1@10.242.238.90' is <18125.21835.0> [rebalance:debug,2014-08-19T16:50:07.918,ns_1@10.242.238.88:<0.2421.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 712 is <0.2429.1> [ns_server:debug,2014-08-19T16:50:07.923,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 717. Nacking mccouch update. [views:debug,2014-08-19T16:50:07.923,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/717. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.924,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",717,active,0} [ns_server:debug,2014-08-19T16:50:07.925,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974,846, 480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922,794, 739,428,300,973,845,662,534,168,896,768,402,274,947,819,764,636,142,998,934, 870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180,116,972, 908,844,780,725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946, 882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971, 843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502,374,919,791, 736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995, 867,684,556,190,918,790,735,424,296,969,841,658,530,164,892,398,270,1020,943, 815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891, 708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294, 967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266,1016, 939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262,1012, 935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883, 700,572,206,1011] [ns_server:debug,2014-08-19T16:50:07.952,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,943099}, tap_estimate, {replica_building,"default",712,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23088.0>, <<"replication_building_712_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:07.950,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, 
{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}]}, {move_state,966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_966_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_966_'ns_1@10.242.238.90'">>}]}, {move_state,457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_457_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_457_'ns_1@10.242.238.91'">>}]}, {move_state,713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_713_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_713_'ns_1@10.242.238.91'">>}]}, {move_state,967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_967_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_967_'ns_1@10.242.238.90'">>}]}, {move_state,458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_458_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_458_'ns_1@10.242.238.91'">>}]}, {move_state,714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_714_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_714_'ns_1@10.242.238.91'">>}]}, {move_state,968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_968_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_968_'ns_1@10.242.238.90'">>}]}, {move_state,459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_459_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_459_'ns_1@10.242.238.91'">>}]}, {move_state,715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_715_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_715_'ns_1@10.242.238.91'">>}]}, {move_state,969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_969_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_969_'ns_1@10.242.238.90'">>}]}, {move_state,460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_460_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_460_'ns_1@10.242.238.91'">>}]}, {move_state,716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_716_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_716_'ns_1@10.242.238.91'">>}]}, {move_state,970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_970_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_970_'ns_1@10.242.238.90'">>}]}, {move_state,461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_461_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_461_'ns_1@10.242.238.91'">>}]}, {move_state,717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_717_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_717_'ns_1@10.242.238.91'">>}]}, {move_state,971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_971_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_971_'ns_1@10.242.238.90'">>}]}, {move_state,462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_462_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_462_'ns_1@10.242.238.91'">>}]}, {move_state,718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_718_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_718_'ns_1@10.242.238.91'">>}]}, {move_state,972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_972_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_972_'ns_1@10.242.238.90'">>}]}, {move_state,463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_463_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_463_'ns_1@10.242.238.91'">>}]}, {move_state,719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_719_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_719_'ns_1@10.242.238.91'">>}]}, {move_state,973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_973_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_973_'ns_1@10.242.238.90'">>}]}, {move_state,464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_464_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_464_'ns_1@10.242.238.91'">>}]}, {move_state,720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_720_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_720_'ns_1@10.242.238.91'">>}]}, {move_state,974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_974_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_974_'ns_1@10.242.238.90'">>}]}, {move_state,465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_465_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_465_'ns_1@10.242.238.91'">>}]}, {move_state,721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_721_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_721_'ns_1@10.242.238.91'">>}]}, {move_state,975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_975_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_975_'ns_1@10.242.238.90'">>}]}, {move_state,466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_466_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_466_'ns_1@10.242.238.91'">>}]}, {move_state,722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_722_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_722_'ns_1@10.242.238.91'">>}]}, {move_state,976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_976_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_976_'ns_1@10.242.238.90'">>}]}, {move_state,467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_467_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_467_'ns_1@10.242.238.91'">>}]}, {move_state,723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_723_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_723_'ns_1@10.242.238.91'">>}]}, {move_state,977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_977_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_977_'ns_1@10.242.238.90'">>}]}, {move_state,468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_468_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_468_'ns_1@10.242.238.91'">>}]}, {move_state,724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_724_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_724_'ns_1@10.242.238.91'">>}]}, {move_state,978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_978_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_978_'ns_1@10.242.238.90'">>}]}, {move_state,469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_469_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_469_'ns_1@10.242.238.91'">>}]}, {move_state,725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_725_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_725_'ns_1@10.242.238.91'">>}]}, {move_state,979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_979_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_979_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:50:07.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 966, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 457, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 713, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 967, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 458, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 714, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 968, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 459, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 715, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 969, 
[{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 460, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 716, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 970, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 461, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 717, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 971, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 462, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 718, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 972, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 463, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 719, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 973, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 464, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 720, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 974, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 465, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 721, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452607,958891}, tap_estimate, {replica_building,"default",712,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21835.0>, <<"replication_building_712_'ns_1@10.242.238.90'">>} 
[ns_server:debug,2014-08-19T16:50:07.968,ns_1@10.242.238.88:<0.2430.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21835.0>}, {'ns_1@10.242.238.91',<18126.23088.0>}]) [ns_server:debug,2014-08-19T16:50:07.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 975, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [rebalance:info,2014-08-19T16:50:07.968,ns_1@10.242.238.88:<0.2421.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [ns_server:debug,2014-08-19T16:50:07.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 466, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [rebalance:info,2014-08-19T16:50:07.969,ns_1@10.242.238.88:<0.2421.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 712 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:07.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 722, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [rebalance:info,2014-08-19T16:50:07.969,ns_1@10.242.238.88:<0.2421.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:07.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 976, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.970,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:07.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 467, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 723, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 977, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 468, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 724, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 978, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 469, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 725, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:07.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 979, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:07.975,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{456, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:07.975,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2483.1>) [ns_server:debug,2014-08-19T16:50:07.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 456) [ns_server:debug,2014-08-19T16:50:07.976,ns_1@10.242.238.88:<0.2484.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:07.976,ns_1@10.242.238.88:<0.2484.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:07.976,ns_1@10.242.238.88:<0.2483.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 456 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:07.976,ns_1@10.242.238.88:<0.2489.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 456 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:07.976,ns_1@10.242.238.88:<0.2490.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 456 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:07.981,ns_1@10.242.238.88:<0.2491.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 456 into 'ns_1@10.242.238.91' is <18126.23093.0> [ns_server:debug,2014-08-19T16:50:07.983,ns_1@10.242.238.88:<0.2491.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 456 into 'ns_1@10.242.238.89' is <18124.26642.0> [rebalance:debug,2014-08-19T16:50:07.983,ns_1@10.242.238.88:<0.2483.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 456 is <0.2491.1> [views:debug,2014-08-19T16:50:07.983,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/717. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:07.983,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",717,active,0} [ns_server:debug,2014-08-19T16:50:08.019,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,10580}, tap_estimate, {replica_building,"default",456,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23093.0>, <<"replication_building_456_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.036,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,26973}, tap_estimate, {replica_building,"default",456,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26642.0>, <<"replication_building_456_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:08.036,ns_1@10.242.238.88:<0.2492.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26642.0>}, {'ns_1@10.242.238.91',<18126.23093.0>}]) [rebalance:info,2014-08-19T16:50:08.036,ns_1@10.242.238.88:<0.2483.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:08.037,ns_1@10.242.238.88:<0.2483.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 456 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.037,ns_1@10.242.238.88:<0.2483.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.038,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.042,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:08.042,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2504.1>) [ns_server:debug,2014-08-19T16:50:08.042,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 965) [ns_server:debug,2014-08-19T16:50:08.042,ns_1@10.242.238.88:<0.2505.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.043,ns_1@10.242.238.88:<0.2505.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:08.043,ns_1@10.242.238.88:<0.2504.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 965 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.043,ns_1@10.242.238.88:<0.2510.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 965 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.043,ns_1@10.242.238.88:<0.2511.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 965 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.047,ns_1@10.242.238.88:<0.2512.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 965 into 'ns_1@10.242.238.90' is <18125.21844.0> [ns_server:debug,2014-08-19T16:50:08.050,ns_1@10.242.238.88:<0.2512.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 965 into 'ns_1@10.242.238.91' is <18126.23098.0> [rebalance:debug,2014-08-19T16:50:08.050,ns_1@10.242.238.88:<0.2504.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 965 is <0.2512.1> [ns_server:debug,2014-08-19T16:50:08.082,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,73622}, tap_estimate, {replica_building,"default",965,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21844.0>, <<"replication_building_965_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.098,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,89746}, tap_estimate, {replica_building,"default",965,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23098.0>, <<"replication_building_965_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.099,ns_1@10.242.238.88:<0.2513.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23098.0>}, {'ns_1@10.242.238.90',<18125.21844.0>}]) [rebalance:info,2014-08-19T16:50:08.116,ns_1@10.242.238.88:<0.2504.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:08.116,ns_1@10.242.238.88:<0.2504.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 965 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.117,ns_1@10.242.238.88:<0.2504.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.117,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:08.121,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.157,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2539.1>) [ns_server:debug,2014-08-19T16:50:08.157,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 711) [ns_server:debug,2014-08-19T16:50:08.158,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 715. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.158,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/715. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.158,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",715,active,0} [ns_server:debug,2014-08-19T16:50:08.158,ns_1@10.242.238.88:<0.2540.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.159,ns_1@10.242.238.88:<0.2540.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:08.159,ns_1@10.242.238.88:<0.2539.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 711 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.159,ns_1@10.242.238.88:<0.2545.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 711 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.159,ns_1@10.242.238.88:<0.2546.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 711 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.160,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,402,274,947,819,764,636,142,998, 934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180,116, 972,908,844,780,725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023, 946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298, 971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502,374,919, 791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322, 995,867,684,556,190,918,790,735,424,296,969,841,658,530,164,892,398,270,1020, 943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346, 891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422, 
294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498,370, 915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446, 318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262, 1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338, 883,700,572,206,1011] [ns_server:debug,2014-08-19T16:50:08.163,ns_1@10.242.238.88:<0.2547.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 711 into 'ns_1@10.242.238.91' is <18126.23118.0> [ns_server:debug,2014-08-19T16:50:08.165,ns_1@10.242.238.88:<0.2547.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 711 into 'ns_1@10.242.238.90' is <18125.21863.0> [rebalance:debug,2014-08-19T16:50:08.165,ns_1@10.242.238.88:<0.2539.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 711 is <0.2547.1> [ns_server:debug,2014-08-19T16:50:08.200,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,191341}, tap_estimate, {replica_building,"default",711,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23118.0>, <<"replication_building_711_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.214,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,205518}, tap_estimate, {replica_building,"default",711,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21863.0>, <<"replication_building_711_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.215,ns_1@10.242.238.88:<0.2548.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21863.0>}, {'ns_1@10.242.238.91',<18126.23118.0>}]) [rebalance:info,2014-08-19T16:50:08.215,ns_1@10.242.238.88:<0.2539.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:08.215,ns_1@10.242.238.88:<0.2539.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 711 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.216,ns_1@10.242.238.88:<0.2539.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.216,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.220,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.220,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2560.1>) 
[ns_server:debug,2014-08-19T16:50:08.220,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 455) [ns_server:debug,2014-08-19T16:50:08.221,ns_1@10.242.238.88:<0.2561.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.221,ns_1@10.242.238.88:<0.2561.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:08.221,ns_1@10.242.238.88:<0.2560.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 455 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.221,ns_1@10.242.238.88:<0.2566.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 455 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.221,ns_1@10.242.238.88:<0.2567.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 455 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.226,ns_1@10.242.238.88:<0.2568.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 455 into 'ns_1@10.242.238.91' is <18126.23137.0> [ns_server:debug,2014-08-19T16:50:08.228,ns_1@10.242.238.88:<0.2568.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 455 into 'ns_1@10.242.238.89' is <18124.26662.0> [rebalance:debug,2014-08-19T16:50:08.228,ns_1@10.242.238.88:<0.2560.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 455 is <0.2568.1> [views:debug,2014-08-19T16:50:08.233,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/715. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.234,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",715,active,0} [ns_server:debug,2014-08-19T16:50:08.262,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,253375}, tap_estimate, {replica_building,"default",455,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23137.0>, <<"replication_building_455_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.279,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,270158}, tap_estimate, {replica_building,"default",455,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26662.0>, <<"replication_building_455_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:08.279,ns_1@10.242.238.88:<0.2569.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26662.0>}, {'ns_1@10.242.238.91',<18126.23137.0>}]) [rebalance:info,2014-08-19T16:50:08.279,ns_1@10.242.238.88:<0.2560.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:08.280,ns_1@10.242.238.88:<0.2560.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 455 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.280,ns_1@10.242.238.88:<0.2560.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.281,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.284,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:08.285,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2581.1>) [ns_server:debug,2014-08-19T16:50:08.285,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 964) [ns_server:debug,2014-08-19T16:50:08.285,ns_1@10.242.238.88:<0.2582.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.285,ns_1@10.242.238.88:<0.2582.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:08.285,ns_1@10.242.238.88:<0.2581.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 964 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.286,ns_1@10.242.238.88:<0.2587.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 964 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.286,ns_1@10.242.238.88:<0.2588.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 964 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.291,ns_1@10.242.238.88:<0.2589.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 964 into 'ns_1@10.242.238.90' is <18125.21883.0> [ns_server:debug,2014-08-19T16:50:08.293,ns_1@10.242.238.88:<0.2589.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 964 into 'ns_1@10.242.238.91' is <18126.23142.0> [rebalance:debug,2014-08-19T16:50:08.293,ns_1@10.242.238.88:<0.2581.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 964 is <0.2589.1> [ns_server:debug,2014-08-19T16:50:08.326,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,317894}, tap_estimate, {replica_building,"default",964,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21883.0>, <<"replication_building_964_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.341,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,332966}, tap_estimate, {replica_building,"default",964,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23142.0>, <<"replication_building_964_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.342,ns_1@10.242.238.88:<0.2590.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23142.0>}, {'ns_1@10.242.238.90',<18125.21883.0>}]) [rebalance:info,2014-08-19T16:50:08.342,ns_1@10.242.238.88:<0.2581.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:08.343,ns_1@10.242.238.88:<0.2581.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 964 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.343,ns_1@10.242.238.88:<0.2581.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.344,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:08.348,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.348,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2616.1>) [ns_server:debug,2014-08-19T16:50:08.348,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 710) [ns_server:debug,2014-08-19T16:50:08.349,ns_1@10.242.238.88:<0.2617.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.349,ns_1@10.242.238.88:<0.2617.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:08.349,ns_1@10.242.238.88:<0.2616.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 710 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.349,ns_1@10.242.238.88:<0.2622.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 710 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.349,ns_1@10.242.238.88:<0.2623.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 710 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.353,ns_1@10.242.238.88:<0.2624.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 710 into 'ns_1@10.242.238.91' is <18126.23148.0> [ns_server:debug,2014-08-19T16:50:08.355,ns_1@10.242.238.88:<0.2624.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 710 into 'ns_1@10.242.238.90' is <18125.21888.0> [rebalance:debug,2014-08-19T16:50:08.355,ns_1@10.242.238.88:<0.2616.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 710 is <0.2624.1> [ns_server:debug,2014-08-19T16:50:08.389,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,380715}, tap_estimate, {replica_building,"default",710,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23148.0>, <<"replication_building_710_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.404,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,395310}, tap_estimate, {replica_building,"default",710,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21888.0>, <<"replication_building_710_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.404,ns_1@10.242.238.88:<0.2625.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21888.0>}, {'ns_1@10.242.238.91',<18126.23148.0>}]) [rebalance:info,2014-08-19T16:50:08.404,ns_1@10.242.238.88:<0.2616.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:08.405,ns_1@10.242.238.88:<0.2616.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 710 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.405,ns_1@10.242.238.88:<0.2616.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.406,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.408,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 713. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:08.408,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/713. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.409,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",713,active,0} [ns_server:debug,2014-08-19T16:50:08.410,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.410,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2637.1>) [ns_server:debug,2014-08-19T16:50:08.410,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 454) [ns_server:debug,2014-08-19T16:50:08.410,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180, 116,972,908,844,780,725,478,414,350,286,959,895,831,712,648,584,520,218,154, 1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737, 426,298,971,843,660,532,166,894,400,272,1022,945,817,762,634,140,996,868,502, 374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816,761, 450,322,995,867,684,556,190,918,790,735,424,296,969,841,658,530,164,892,398, 270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968,840, 474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788, 733,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864, 498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812, 757,446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888, 394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964, 836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912, 784,729,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988, 860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808, 
753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884, 390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832, 466,338,883,700,572,206,1011] [ns_server:debug,2014-08-19T16:50:08.411,ns_1@10.242.238.88:<0.2638.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.411,ns_1@10.242.238.88:<0.2638.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:08.411,ns_1@10.242.238.88:<0.2637.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 454 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.411,ns_1@10.242.238.88:<0.2643.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 454 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.411,ns_1@10.242.238.88:<0.2644.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 454 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.415,ns_1@10.242.238.88:<0.2645.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 454 into 'ns_1@10.242.238.91' is <18126.23167.0> [ns_server:debug,2014-08-19T16:50:08.416,ns_1@10.242.238.88:<0.2645.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 454 into 'ns_1@10.242.238.89' is <18124.26682.0> [rebalance:debug,2014-08-19T16:50:08.416,ns_1@10.242.238.88:<0.2637.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 454 is <0.2645.1> [ns_server:debug,2014-08-19T16:50:08.452,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,443762}, tap_estimate, {replica_building,"default",454,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23167.0>, <<"replication_building_454_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:50:08.460,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/713. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",713,active,0} [ns_server:debug,2014-08-19T16:50:08.466,ns_1@10.242.238.88:<0.2646.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26682.0>}, {'ns_1@10.242.238.91',<18126.23167.0>}]) [rebalance:info,2014-08-19T16:50:08.466,ns_1@10.242.238.88:<0.2637.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:08.466,ns_1@10.242.238.88:<0.2637.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 454 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.467,ns_1@10.242.238.88:<0.2637.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.467,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.471,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,456513}, tap_estimate, {replica_building,"default",454,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26682.0>, <<"replication_building_454_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:08.473,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:08.473,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2659.1>) [ns_server:debug,2014-08-19T16:50:08.473,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 963) [ns_server:debug,2014-08-19T16:50:08.474,ns_1@10.242.238.88:<0.2660.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.474,ns_1@10.242.238.88:<0.2660.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:08.474,ns_1@10.242.238.88:<0.2659.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 963 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.474,ns_1@10.242.238.88:<0.2665.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 963 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.474,ns_1@10.242.238.88:<0.2666.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 963 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.478,ns_1@10.242.238.88:<0.2667.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 963 into 'ns_1@10.242.238.90' is <18125.21916.0> [ns_server:debug,2014-08-19T16:50:08.479,ns_1@10.242.238.88:<0.2667.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 963 into 'ns_1@10.242.238.91' is <18126.23172.0> [rebalance:debug,2014-08-19T16:50:08.479,ns_1@10.242.238.88:<0.2659.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 963 is <0.2667.1> [ns_server:debug,2014-08-19T16:50:08.515,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,505957}, tap_estimate, {replica_building,"default",963,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21916.0>, <<"replication_building_963_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.530,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,521875}, tap_estimate, {replica_building,"default",963,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23172.0>, <<"replication_building_963_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.531,ns_1@10.242.238.88:<0.2668.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23172.0>}, {'ns_1@10.242.238.90',<18125.21916.0>}]) [rebalance:info,2014-08-19T16:50:08.531,ns_1@10.242.238.88:<0.2659.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:08.532,ns_1@10.242.238.88:<0.2659.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 963 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.532,ns_1@10.242.238.88:<0.2659.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.533,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:08.537,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.537,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2694.1>) [ns_server:debug,2014-08-19T16:50:08.537,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 709) [ns_server:debug,2014-08-19T16:50:08.538,ns_1@10.242.238.88:<0.2695.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.538,ns_1@10.242.238.88:<0.2695.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:08.538,ns_1@10.242.238.88:<0.2694.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 709 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.538,ns_1@10.242.238.88:<0.2700.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 709 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.538,ns_1@10.242.238.88:<0.2701.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 709 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.542,ns_1@10.242.238.88:<0.2702.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 709 into 'ns_1@10.242.238.91' is <18126.23184.0> [ns_server:debug,2014-08-19T16:50:08.544,ns_1@10.242.238.88:<0.2702.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 709 into 'ns_1@10.242.238.90' is <18125.21923.0> [rebalance:debug,2014-08-19T16:50:08.544,ns_1@10.242.238.88:<0.2694.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 709 is <0.2702.1> [ns_server:debug,2014-08-19T16:50:08.569,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 711. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.569,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/711. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.569,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",711,active,0} [ns_server:debug,2014-08-19T16:50:08.570,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,934,870,806,751,504,440,376,312,985,921,857,793,738,674,610,546,244,180, 116,972,908,844,780,725,478,414,350,286,959,895,831,712,648,584,520,218,154, 1023,946,882,818,763,452,388,324,260,1010,997,869,686,558,192,920,792,737, 426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634,140,996,868, 502,374,919,791,736,608,242,114,970,842,476,348,893,710,582,216,1021,944,816, 761,450,322,995,867,684,556,190,918,790,735,424,296,969,841,658,530,164,892, 398,270,1020,943,815,760,632,138,994,866,500,372,917,789,734,606,240,112,968, 840,474,346,891,708,580,214,1019,942,814,759,448,320,993,865,682,554,188,916, 788,733,422,294,967,839,656,528,162,890,396,268,1018,941,813,758,630,136,992, 864,498,370,915,787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940, 812,757,446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160, 888,394,266,1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108, 964,836,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184, 912,784,729,418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132, 988,860,494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936, 808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156, 884,390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960, 832,466,338,883,700,572,206,1011] [ns_server:debug,2014-08-19T16:50:08.580,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,571467}, tap_estimate, {replica_building,"default",709,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23184.0>, <<"replication_building_709_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.594,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,585105}, tap_estimate, {replica_building,"default",709,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21923.0>, <<"replication_building_709_'ns_1@10.242.238.90'">>} 
[ns_server:debug,2014-08-19T16:50:08.594,ns_1@10.242.238.88:<0.2703.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21923.0>}, {'ns_1@10.242.238.91',<18126.23184.0>}]) [rebalance:info,2014-08-19T16:50:08.594,ns_1@10.242.238.88:<0.2694.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:08.595,ns_1@10.242.238.88:<0.2694.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 709 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.595,ns_1@10.242.238.88:<0.2694.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.596,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.600,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.600,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2715.1>) [ns_server:debug,2014-08-19T16:50:08.600,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 453) [ns_server:debug,2014-08-19T16:50:08.600,ns_1@10.242.238.88:<0.2716.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.601,ns_1@10.242.238.88:<0.2716.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:08.601,ns_1@10.242.238.88:<0.2715.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 453 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.601,ns_1@10.242.238.88:<0.2721.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 453 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.601,ns_1@10.242.238.88:<0.2722.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 453 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.606,ns_1@10.242.238.88:<0.2723.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 453 into 'ns_1@10.242.238.91' is <18126.23204.0> [ns_server:debug,2014-08-19T16:50:08.608,ns_1@10.242.238.88:<0.2723.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 453 into 'ns_1@10.242.238.89' is <18124.26702.0> [rebalance:debug,2014-08-19T16:50:08.608,ns_1@10.242.238.88:<0.2715.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 453 is <0.2723.1> [views:debug,2014-08-19T16:50:08.627,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/711. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.628,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",711,active,0} [ns_server:debug,2014-08-19T16:50:08.646,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,636986}, tap_estimate, {replica_building,"default",453,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23204.0>, <<"replication_building_453_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.662,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,653262}, tap_estimate, {replica_building,"default",453,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26702.0>, <<"replication_building_453_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:08.662,ns_1@10.242.238.88:<0.2724.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26702.0>}, {'ns_1@10.242.238.91',<18126.23204.0>}]) [rebalance:info,2014-08-19T16:50:08.663,ns_1@10.242.238.88:<0.2715.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:08.663,ns_1@10.242.238.88:<0.2715.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 453 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.664,ns_1@10.242.238.88:<0.2715.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.664,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.668,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:08.668,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2744.1>) [ns_server:debug,2014-08-19T16:50:08.668,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 962) [ns_server:debug,2014-08-19T16:50:08.669,ns_1@10.242.238.88:<0.2747.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.669,ns_1@10.242.238.88:<0.2747.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:08.669,ns_1@10.242.238.88:<0.2744.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 962 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.669,ns_1@10.242.238.88:<0.2756.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 962 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.669,ns_1@10.242.238.88:<0.2757.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 962 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.673,ns_1@10.242.238.88:<0.2758.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 962 into 'ns_1@10.242.238.90' is <18125.21943.0> [ns_server:debug,2014-08-19T16:50:08.676,ns_1@10.242.238.88:<0.2758.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 962 into 'ns_1@10.242.238.91' is <18126.23209.0> [rebalance:debug,2014-08-19T16:50:08.676,ns_1@10.242.238.88:<0.2744.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 962 is <0.2758.1> [ns_server:debug,2014-08-19T16:50:08.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 709. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.703,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/709. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",709,active,0} [ns_server:debug,2014-08-19T16:50:08.704,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,870,504,376,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780, 725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763, 452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532, 166,894,711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684, 
556,190,918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815, 760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708, 580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,396,268,1018,941,813,758,630,136,992,864,498,370,915,787, 732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266,1016,939, 811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987, 859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262,1012,935, 807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883,700, 572,206,1011,934,806,751,440,312] [ns_server:debug,2014-08-19T16:50:08.710,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,701533}, tap_estimate, {replica_building,"default",962,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21943.0>, <<"replication_building_962_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.725,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,716349}, tap_estimate, {replica_building,"default",962,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23209.0>, <<"replication_building_962_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.725,ns_1@10.242.238.88:<0.2759.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23209.0>}, {'ns_1@10.242.238.90',<18125.21943.0>}]) [rebalance:info,2014-08-19T16:50:08.726,ns_1@10.242.238.88:<0.2744.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:08.726,ns_1@10.242.238.88:<0.2744.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 962 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.727,ns_1@10.242.238.88:<0.2744.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.727,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:08.731,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.731,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2771.1>) [ns_server:debug,2014-08-19T16:50:08.731,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 708) [ns_server:debug,2014-08-19T16:50:08.732,ns_1@10.242.238.88:<0.2772.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.732,ns_1@10.242.238.88:<0.2772.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:08.732,ns_1@10.242.238.88:<0.2771.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 708 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.732,ns_1@10.242.238.88:<0.2777.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 708 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.732,ns_1@10.242.238.88:<0.2778.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 708 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.736,ns_1@10.242.238.88:<0.2779.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 708 into 'ns_1@10.242.238.91' is <18126.23215.0> [views:debug,2014-08-19T16:50:08.737,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/709. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.737,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",709,active,0} [ns_server:debug,2014-08-19T16:50:08.741,ns_1@10.242.238.88:<0.2779.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 708 into 'ns_1@10.242.238.90' is <18125.21948.0> [rebalance:debug,2014-08-19T16:50:08.741,ns_1@10.242.238.88:<0.2771.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 708 is <0.2779.1> [ns_server:debug,2014-08-19T16:50:08.772,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,763753}, tap_estimate, {replica_building,"default",708,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23215.0>, <<"replication_building_708_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.791,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,782327}, tap_estimate, {replica_building,"default",708,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21948.0>, <<"replication_building_708_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.791,ns_1@10.242.238.88:<0.2780.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21948.0>}, {'ns_1@10.242.238.91',<18126.23215.0>}]) [rebalance:info,2014-08-19T16:50:08.792,ns_1@10.242.238.88:<0.2771.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:08.792,ns_1@10.242.238.88:<0.2771.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 708 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.793,ns_1@10.242.238.88:<0.2771.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.793,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} 
[ns_server:debug,2014-08-19T16:50:08.797,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.798,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2806.1>) [ns_server:debug,2014-08-19T16:50:08.798,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 452) [ns_server:debug,2014-08-19T16:50:08.798,ns_1@10.242.238.88:<0.2807.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.798,ns_1@10.242.238.88:<0.2807.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:08.798,ns_1@10.242.238.88:<0.2806.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 452 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.799,ns_1@10.242.238.88:<0.2812.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 452 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.799,ns_1@10.242.238.88:<0.2813.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 452 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.802,ns_1@10.242.238.88:<0.2814.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 452 into 'ns_1@10.242.238.91' is <18126.23234.0> [ns_server:debug,2014-08-19T16:50:08.804,ns_1@10.242.238.88:<0.2814.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 452 into 'ns_1@10.242.238.89' is <18124.26736.0> [rebalance:debug,2014-08-19T16:50:08.804,ns_1@10.242.238.88:<0.2806.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 452 is <0.2814.1> [ns_server:debug,2014-08-19T16:50:08.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 707. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.813,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/707. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",707,active,0} [ns_server:debug,2014-08-19T16:50:08.815,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,870,504,376,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780, 725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763, 452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532, 166,894,711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684, 556,190,918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815, 760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708, 580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,394,266,1016, 939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262,1012, 935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883, 700,572,206,1011,934,806,751,440,312] [ns_server:debug,2014-08-19T16:50:08.837,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,828547}, tap_estimate, {replica_building,"default",452,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23234.0>, <<"replication_building_452_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:50:08.847,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/707. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.847,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",707,active,0} [ns_server:debug,2014-08-19T16:50:08.854,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,845031}, tap_estimate, {replica_building,"default",452,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26736.0>, <<"replication_building_452_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:08.854,ns_1@10.242.238.88:<0.2815.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26736.0>}, {'ns_1@10.242.238.91',<18126.23234.0>}]) [rebalance:info,2014-08-19T16:50:08.854,ns_1@10.242.238.88:<0.2806.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:08.855,ns_1@10.242.238.88:<0.2806.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 452 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.855,ns_1@10.242.238.88:<0.2806.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.856,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.860,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:08.860,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2827.1>) [ns_server:debug,2014-08-19T16:50:08.860,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 961) [ns_server:debug,2014-08-19T16:50:08.860,ns_1@10.242.238.88:<0.2828.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.860,ns_1@10.242.238.88:<0.2828.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:08.861,ns_1@10.242.238.88:<0.2827.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 961 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.861,ns_1@10.242.238.88:<0.2834.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 961 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [rebalance:info,2014-08-19T16:50:08.861,ns_1@10.242.238.88:<0.2833.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 961 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:50:08.865,ns_1@10.242.238.88:<0.2835.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 961 into 'ns_1@10.242.238.90' is <18125.21968.0> [ns_server:debug,2014-08-19T16:50:08.867,ns_1@10.242.238.88:<0.2835.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 961 into 'ns_1@10.242.238.91' is <18126.23239.0> [rebalance:debug,2014-08-19T16:50:08.867,ns_1@10.242.238.88:<0.2827.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 961 is <0.2835.1> [ns_server:debug,2014-08-19T16:50:08.900,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,891574}, tap_estimate, {replica_building,"default",961,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21968.0>, <<"replication_building_961_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.919,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,910520}, tap_estimate, {replica_building,"default",961,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23239.0>, <<"replication_building_961_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.920,ns_1@10.242.238.88:<0.2836.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23239.0>}, {'ns_1@10.242.238.90',<18125.21968.0>}]) [rebalance:info,2014-08-19T16:50:08.920,ns_1@10.242.238.88:<0.2827.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:08.920,ns_1@10.242.238.88:<0.2827.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 961 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.921,ns_1@10.242.238.88:<0.2827.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.921,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:08.922,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 705. Nacking mccouch update. [views:debug,2014-08-19T16:50:08.922,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/705. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.923,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",705,active,0} [ns_server:debug,2014-08-19T16:50:08.924,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,870,504,376,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780, 725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763, 452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532, 166,894,711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684, 556,190,918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815, 760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708, 580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390,262, 1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338, 883,700,572,206,1011,934,806,751,440,312] [ns_server:debug,2014-08-19T16:50:08.925,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.925,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2862.1>) [ns_server:debug,2014-08-19T16:50:08.925,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 707) 
[ns_server:debug,2014-08-19T16:50:08.926,ns_1@10.242.238.88:<0.2863.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.926,ns_1@10.242.238.88:<0.2863.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:08.926,ns_1@10.242.238.88:<0.2862.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 707 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.926,ns_1@10.242.238.88:<0.2868.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 707 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.926,ns_1@10.242.238.88:<0.2869.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 707 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.930,ns_1@10.242.238.88:<0.2870.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 707 into 'ns_1@10.242.238.91' is <18126.23259.0> [ns_server:debug,2014-08-19T16:50:08.932,ns_1@10.242.238.88:<0.2870.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 707 into 'ns_1@10.242.238.90' is <18125.21973.0> [rebalance:debug,2014-08-19T16:50:08.933,ns_1@10.242.238.88:<0.2862.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 707 is <0.2870.1> [views:debug,2014-08-19T16:50:08.956,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/705. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:08.956,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",705,active,0} [ns_server:debug,2014-08-19T16:50:08.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,957611}, tap_estimate, {replica_building,"default",707,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23259.0>, <<"replication_building_707_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:08.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452608,973850}, tap_estimate, {replica_building,"default",707,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21973.0>, <<"replication_building_707_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:08.983,ns_1@10.242.238.88:<0.2871.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21973.0>}, {'ns_1@10.242.238.91',<18126.23259.0>}]) [rebalance:info,2014-08-19T16:50:08.983,ns_1@10.242.238.88:<0.2862.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:08.984,ns_1@10.242.238.88:<0.2862.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 707 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:08.984,ns_1@10.242.238.88:<0.2862.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:08.985,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:08.989,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:08.989,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.2883.1>) [ns_server:debug,2014-08-19T16:50:08.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 451) [ns_server:debug,2014-08-19T16:50:08.989,ns_1@10.242.238.88:<0.2884.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:08.989,ns_1@10.242.238.88:<0.2884.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:08.989,ns_1@10.242.238.88:<0.2883.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 451 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:08.990,ns_1@10.242.238.88:<0.2889.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 451 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:08.990,ns_1@10.242.238.88:<0.2890.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 451 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:08.994,ns_1@10.242.238.88:<0.2896.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 451 into 'ns_1@10.242.238.91' is <18126.23264.0> [ns_server:debug,2014-08-19T16:50:08.996,ns_1@10.242.238.88:<0.2896.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 451 into 'ns_1@10.242.238.89' is <18124.26756.0> [rebalance:debug,2014-08-19T16:50:08.996,ns_1@10.242.238.88:<0.2883.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 451 is <0.2896.1> [ns_server:debug,2014-08-19T16:50:09.033,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,24209}, tap_estimate, {replica_building,"default",451,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23264.0>, <<"replication_building_451_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:09.049,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,40827}, tap_estimate, {replica_building,"default",451,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26756.0>, <<"replication_building_451_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:09.050,ns_1@10.242.238.88:<0.2906.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26756.0>}, {'ns_1@10.242.238.91',<18126.23264.0>}]) [rebalance:info,2014-08-19T16:50:09.050,ns_1@10.242.238.88:<0.2883.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:09.051,ns_1@10.242.238.88:<0.2883.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 451 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.051,ns_1@10.242.238.88:<0.2883.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:09.052,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:09.057,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:09.057,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.2918.1>) [ns_server:debug,2014-08-19T16:50:09.057,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 960) [ns_server:debug,2014-08-19T16:50:09.057,ns_1@10.242.238.88:<0.2919.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:09.057,ns_1@10.242.238.88:<0.2919.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:09.057,ns_1@10.242.238.88:<0.2918.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 960 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:09.058,ns_1@10.242.238.88:<0.2924.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 960 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:09.058,ns_1@10.242.238.88:<0.2925.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 960 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:09.062,ns_1@10.242.238.88:<0.2926.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 960 into 'ns_1@10.242.238.90' is <18125.21993.0> [ns_server:debug,2014-08-19T16:50:09.063,ns_1@10.242.238.88:<0.2926.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 960 into 'ns_1@10.242.238.91' is <18126.23269.0> [rebalance:debug,2014-08-19T16:50:09.063,ns_1@10.242.238.88:<0.2918.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 960 is <0.2926.1> [ns_server:debug,2014-08-19T16:50:09.067,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 703. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.067,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/703. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.067,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",703,active,0} [ns_server:debug,2014-08-19T16:50:09.069,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,870,504,376,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780, 725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763, 452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532, 166,894,711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684, 556,190,918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815, 760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708, 580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,390, 262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466, 338,883,700,572,206,1011,934,806,751,440,312] [ns_server:debug,2014-08-19T16:50:09.099,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,90164}, tap_estimate, {replica_building,"default",960,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21993.0>, <<"replication_building_960_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:09.114,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,105060}, tap_estimate, {replica_building,"default",960,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23269.0>, <<"replication_building_960_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:09.114,ns_1@10.242.238.88:<0.2927.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.23269.0>}, {'ns_1@10.242.238.90',<18125.21993.0>}]) [rebalance:info,2014-08-19T16:50:09.114,ns_1@10.242.238.88:<0.2918.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:09.115,ns_1@10.242.238.88:<0.2918.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 960 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.115,ns_1@10.242.238.88:<0.2918.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:09.116,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [views:debug,2014-08-19T16:50:09.118,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/703. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.118,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",703,active,0} [ns_server:debug,2014-08-19T16:50:09.120,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:09.120,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.2939.1>) [ns_server:debug,2014-08-19T16:50:09.120,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 706) [ns_server:debug,2014-08-19T16:50:09.120,ns_1@10.242.238.88:<0.2940.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:09.120,ns_1@10.242.238.88:<0.2940.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:09.120,ns_1@10.242.238.88:<0.2939.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 706 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:09.121,ns_1@10.242.238.88:<0.2945.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 706 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:09.121,ns_1@10.242.238.88:<0.2946.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 706 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:09.124,ns_1@10.242.238.88:<0.2947.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 706 into 'ns_1@10.242.238.91' is <18126.23289.0> [ns_server:debug,2014-08-19T16:50:09.126,ns_1@10.242.238.88:<0.2947.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 706 into 'ns_1@10.242.238.90' is <18125.21998.0> [rebalance:debug,2014-08-19T16:50:09.126,ns_1@10.242.238.88:<0.2939.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 706 is <0.2947.1> [ns_server:info,2014-08-19T16:50:09.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:50:09.160,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,151525}, tap_estimate, {replica_building,"default",706,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23289.0>, <<"replication_building_706_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:09.179,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,170779}, tap_estimate, {replica_building,"default",706,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.21998.0>, <<"replication_building_706_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:09.180,ns_1@10.242.238.88:<0.2948.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.21998.0>}, {'ns_1@10.242.238.91',<18126.23289.0>}]) [rebalance:info,2014-08-19T16:50:09.180,ns_1@10.242.238.88:<0.2939.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:09.180,ns_1@10.242.238.88:<0.2939.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 706 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.181,ns_1@10.242.238.88:<0.2939.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:09.181,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:09.185,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:09.185,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] 
(<0.2965.1>) [ns_server:debug,2014-08-19T16:50:09.185,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 450) [ns_server:debug,2014-08-19T16:50:09.185,ns_1@10.242.238.88:<0.2966.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:09.185,ns_1@10.242.238.88:<0.2966.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:09.186,ns_1@10.242.238.88:<0.2965.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 450 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:09.186,ns_1@10.242.238.88:<0.2971.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 450 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:09.186,ns_1@10.242.238.88:<0.2972.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 450 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:09.190,ns_1@10.242.238.88:<0.2973.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 450 into 'ns_1@10.242.238.91' is <18126.23294.0> [ns_server:debug,2014-08-19T16:50:09.193,ns_1@10.242.238.88:<0.2973.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 450 into 'ns_1@10.242.238.89' is <18124.26777.0> [rebalance:debug,2014-08-19T16:50:09.193,ns_1@10.242.238.88:<0.2965.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 450 is <0.2973.1> [ns_server:debug,2014-08-19T16:50:09.225,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,216916}, tap_estimate, {replica_building,"default",450,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23294.0>, <<"replication_building_450_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:09.244,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,235972}, tap_estimate, {replica_building,"default",450,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26777.0>, <<"replication_building_450_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:09.245,ns_1@10.242.238.88:<0.2974.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26777.0>}, {'ns_1@10.242.238.91',<18126.23294.0>}]) [rebalance:info,2014-08-19T16:50:09.245,ns_1@10.242.238.88:<0.2965.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:09.246,ns_1@10.242.238.88:<0.2965.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 450 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.246,ns_1@10.242.238.88:<0.2965.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:09.247,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} 
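Annotation: the mover entries above repeat the same per-vbucket sequence (vbuckets 960, 706 and 450 so far, with 705, 449, 704 and 448 following below): spawn a single vbucket mover, apply the bulk replica/passive state change on the destination nodes, spawn one replica-building ebucketmigrator per destination, record a tap_estimate for each builder, determine backfill, initiate indexing, fetch the replication persistence checkpoint id, then note backfill done and schedule the next move. As an aid for reading long stretches of this trace, here is a minimal Python sketch that groups those markers per vbucket from a saved copy of the log. The file name, the marker regexes and the assumption of one log entry per line are illustrative assumptions, not anything shipped with Couchbase.

#!/usr/bin/env python3
"""Group per-vbucket move lifecycle markers out of an ns_server debug log.

Sketch only: log path and regexes are assumptions based on the entries
quoted above, and it assumes one log entry per physical line.
"""
import re
from collections import defaultdict

MARKERS = [
    ("spawned",         re.compile(r'Spawned single vbucket mover:.*"default",(\d+),')),
    ("bulk_set_state",  re.compile(r"Doing bulk vbucket (\d+) state change")),
    ("replica_builder", re.compile(r"Replica building ebucketmigrator for vbucket (\d+) into")),
    ("checkpoint_id",   re.compile(r"get_replication_persistence_checkpoint_id call for vbucket (\d+)")),
    ("backfill_done",   re.compile(r"noted backfill done: \{move,\{(\d+),")),
]

def lifecycle(log_path="ns_server.debug.log"):
    seen = defaultdict(list)          # vbucket id -> marker names, in log order
    with open(log_path, errors="replace") as fh:
        for line in fh:
            for name, rx in MARKERS:
                m = rx.search(line)
                if m:
                    seen[int(m.group(1))].append(name)
    return seen

if __name__ == "__main__":
    for vb, steps in sorted(lifecycle().items()):
        print(f"{vb}: " + " -> ".join(steps))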
[ns_server:debug,2014-08-19T16:50:09.250,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:09.250,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.3000.1>) [ns_server:debug,2014-08-19T16:50:09.250,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 705) [ns_server:debug,2014-08-19T16:50:09.251,ns_1@10.242.238.88:<0.3001.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:09.251,ns_1@10.242.238.88:<0.3001.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:09.251,ns_1@10.242.238.88:<0.3000.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 705 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:09.251,ns_1@10.242.238.88:<0.3006.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 705 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:09.251,ns_1@10.242.238.88:<0.3007.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 705 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:09.256,ns_1@10.242.238.88:<0.3008.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 705 into 'ns_1@10.242.238.91' is <18126.23313.0> [ns_server:debug,2014-08-19T16:50:09.258,ns_1@10.242.238.88:<0.3008.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 705 into 'ns_1@10.242.238.90' is <18125.22018.0> [rebalance:debug,2014-08-19T16:50:09.258,ns_1@10.242.238.88:<0.3000.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 705 is <0.3008.1> [ns_server:debug,2014-08-19T16:50:09.298,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,284076}, tap_estimate, {replica_building,"default",705,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23313.0>, <<"replication_building_705_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:09.301,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 701. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.301,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/701. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.301,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",701,active,0} [ns_server:debug,2014-08-19T16:50:09.303,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,870,504,376,985,921,857,793,738,674,610,546,244,180,116,972,908,844,780, 725,478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763, 452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532, 166,894,711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684, 556,190,918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815, 760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708, 580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701, 390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832, 466,338,883,700,572,206,1011,934,806,751,440,312] [ns_server:debug,2014-08-19T16:50:09.308,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,299133}, tap_estimate, {replica_building,"default",705,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22018.0>, <<"replication_building_705_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:09.308,ns_1@10.242.238.88:<0.3009.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22018.0>}, {'ns_1@10.242.238.91',<18126.23313.0>}]) 
[rebalance:info,2014-08-19T16:50:09.308,ns_1@10.242.238.88:<0.3000.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:09.309,ns_1@10.242.238.88:<0.3000.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 705 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.309,ns_1@10.242.238.88:<0.3000.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:09.310,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:09.313,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [ns_server:debug,2014-08-19T16:50:09.313,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 449) [rebalance:debug,2014-08-19T16:50:09.313,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.3021.1>) [ns_server:debug,2014-08-19T16:50:09.314,ns_1@10.242.238.88:<0.3022.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:09.314,ns_1@10.242.238.88:<0.3022.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:09.314,ns_1@10.242.238.88:<0.3021.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 449 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:09.314,ns_1@10.242.238.88:<0.3027.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 449 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:09.314,ns_1@10.242.238.88:<0.3028.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 449 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:09.318,ns_1@10.242.238.88:<0.3029.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 449 into 'ns_1@10.242.238.91' is <18126.23318.0> [ns_server:debug,2014-08-19T16:50:09.320,ns_1@10.242.238.88:<0.3029.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 449 into 'ns_1@10.242.238.89' is <18124.26783.0> [rebalance:debug,2014-08-19T16:50:09.320,ns_1@10.242.238.88:<0.3021.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 449 is <0.3029.1> [ns_server:debug,2014-08-19T16:50:09.354,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,345569}, tap_estimate, {replica_building,"default",449,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23318.0>, <<"replication_building_449_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:09.370,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,361823}, tap_estimate, 
{replica_building,"default",449,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26783.0>, <<"replication_building_449_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:09.371,ns_1@10.242.238.88:<0.3030.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26783.0>}, {'ns_1@10.242.238.91',<18126.23318.0>}]) [rebalance:info,2014-08-19T16:50:09.371,ns_1@10.242.238.88:<0.3021.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:09.372,ns_1@10.242.238.88:<0.3021.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 449 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.372,ns_1@10.242.238.88:<0.3021.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:09.373,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:09.376,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:09.376,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.3042.1>) [ns_server:debug,2014-08-19T16:50:09.376,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 704) [ns_server:debug,2014-08-19T16:50:09.377,ns_1@10.242.238.88:<0.3043.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:09.377,ns_1@10.242.238.88:<0.3043.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:09.377,ns_1@10.242.238.88:<0.3042.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 704 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:09.377,ns_1@10.242.238.88:<0.3048.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 704 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:09.377,ns_1@10.242.238.88:<0.3049.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 704 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:09.381,ns_1@10.242.238.88:<0.3050.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 704 into 'ns_1@10.242.238.91' is <18126.23323.0> [ns_server:debug,2014-08-19T16:50:09.384,ns_1@10.242.238.88:<0.3050.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 704 into 'ns_1@10.242.238.90' is <18125.22038.0> [rebalance:debug,2014-08-19T16:50:09.384,ns_1@10.242.238.88:<0.3042.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 704 is <0.3050.1> [views:debug,2014-08-19T16:50:09.385,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/701. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.385,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",701,active,0} [ns_server:debug,2014-08-19T16:50:09.416,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,407619}, tap_estimate, {replica_building,"default",704,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23323.0>, <<"replication_building_704_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:09.433,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,424472}, tap_estimate, {replica_building,"default",704,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22038.0>, <<"replication_building_704_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:09.434,ns_1@10.242.238.88:<0.3051.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22038.0>}, {'ns_1@10.242.238.91',<18126.23323.0>}]) [rebalance:info,2014-08-19T16:50:09.434,ns_1@10.242.238.88:<0.3042.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:09.434,ns_1@10.242.238.88:<0.3042.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 704 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.435,ns_1@10.242.238.88:<0.3042.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:09.436,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:09.438,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:09.438,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.3063.1>) [ns_server:debug,2014-08-19T16:50:09.439,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 448) [ns_server:debug,2014-08-19T16:50:09.439,ns_1@10.242.238.88:<0.3064.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:09.439,ns_1@10.242.238.88:<0.3064.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:09.439,ns_1@10.242.238.88:<0.3063.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 448 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:09.439,ns_1@10.242.238.88:<0.3069.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 448 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:09.440,ns_1@10.242.238.88:<0.3070.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 448 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:09.444,ns_1@10.242.238.88:<0.3071.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 448 into 'ns_1@10.242.238.91' is <18126.23342.0> [ns_server:debug,2014-08-19T16:50:09.447,ns_1@10.242.238.88:<0.3071.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 448 into 'ns_1@10.242.238.89' is <18124.26789.0> [rebalance:debug,2014-08-19T16:50:09.447,ns_1@10.242.238.88:<0.3063.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 448 is <0.3071.1> [ns_server:debug,2014-08-19T16:50:09.480,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,471224}, tap_estimate, {replica_building,"default",448,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.23342.0>, <<"replication_building_448_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:09.495,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452609,486672}, tap_estimate, {replica_building,"default",448,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.26789.0>, <<"replication_building_448_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:09.496,ns_1@10.242.238.88:<0.3072.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.26789.0>}, {'ns_1@10.242.238.91',<18126.23342.0>}]) [rebalance:info,2014-08-19T16:50:09.496,ns_1@10.242.238.88:<0.3063.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:09.496,ns_1@10.242.238.88:<0.3063.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 448 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:09.497,ns_1@10.242.238.88:<0.3063.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
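Annotation: the same markers carry timestamps, so the spacing between "Spawned single vbucket mover" and the matching "noted backfill done" can be read straight out of the trace (roughly 60 ms for each of the moves above: 706, 450, 705, 449, 704, 448). A hedged sketch under the same assumptions as before (assumed log path, one entry per line):

#!/usr/bin/env python3
"""Per-vbucket interval from 'Spawned single vbucket mover' to the matching
'noted backfill done'. Sketch only; path and one-entry-per-line are assumed."""
import re
from datetime import datetime

TS      = re.compile(r"(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+)")
SPAWNED = re.compile(r'Spawned single vbucket mover:.*"default",(\d+),')
DONE    = re.compile(r"noted backfill done: \{move,\{(\d+),")

def backfill_intervals(log_path="ns_server.debug.log"):
    started, intervals = {}, {}
    with open(log_path, errors="replace") as fh:
        for line in fh:
            ts_m = TS.search(line)
            if not ts_m:
                continue
            ts = datetime.strptime(ts_m.group(1), "%Y-%m-%dT%H:%M:%S.%f")
            m = SPAWNED.search(line)
            if m:
                started[int(m.group(1))] = ts
            m = DONE.search(line)
            if m and int(m.group(1)) in started:
                vb = int(m.group(1))
                intervals[vb] = (ts - started.pop(vb)).total_seconds()
    return intervals

if __name__ == "__main__":
    for vb, secs in sorted(backfill_intervals().items()):
        print(f"vbucket {vb}: backfill done noted after {secs:.3f}s")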
[ns_server:debug,2014-08-19T16:50:09.498,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:09.499,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:09.535,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 699. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.536,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/699. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.536,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",699,active,0} [ns_server:debug,2014-08-19T16:50:09.537,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,386,258,1008,931,803,748,620,254,126,982,854,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256,1006, 929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850,484, 356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508,380, 1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822,767, 456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118,974, 846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194,922, 794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636,142, 998,870,504,376,921,793,738,610,244,116,972,908,844,780,725,478,414,350,286, 959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388,324,260, 1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400, 272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790, 735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787,732,604,238, 110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887,704,576, 210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963,835, 652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494,366,911,783, 728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987,859, 676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012,935, 807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883,700, 
572,206,1011,934,806,751,440,312,985,857,674,546,180] [views:debug,2014-08-19T16:50:09.619,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/699. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.620,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",699,active,0} [ns_server:debug,2014-08-19T16:50:09.795,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 697. Nacking mccouch update. [views:debug,2014-08-19T16:50:09.795,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/697. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.795,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",697,active,0} [ns_server:debug,2014-08-19T16:50:09.796,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,384,256, 1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826,460, 332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719, 408,280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978,850, 484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743, 432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,508, 380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950,822, 767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170,898, 770,715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246,118, 974,846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560,194, 922,794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764,636, 142,998,870,504,376,921,793,738,610,244,116,972,908,844,780,725,478,414,350, 286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388,324, 260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711, 400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918, 790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138, 994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019, 942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787,732,604, 238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987, 859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012, 
935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883, 700,572,206,1011,934,806,751,440,312,985,857,674,546,180] [views:debug,2014-08-19T16:50:09.879,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/697. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:09.879,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",697,active,0} [ns_server:debug,2014-08-19T16:50:10.020,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 695. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.020,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/695. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.021,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",695,active,0} [ns_server:debug,2014-08-19T16:50:10.022,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,510,382,1004,927,799,744,616,250,122,978, 850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222,950, 822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536,170, 898,770,715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612,246, 118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688,560, 194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819,764, 636,142,998,870,504,376,921,793,738,610,244,116,972,908,844,780,725,478,414, 350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388, 324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894, 711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190, 918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632, 138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214, 1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656, 528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787,732, 604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863, 680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939, 811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 
987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262, 1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338, 883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180] [views:debug,2014-08-19T16:50:10.082,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/695. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.082,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",695,active,0} [ns_server:debug,2014-08-19T16:50:10.165,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 693. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.165,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/693. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.165,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",693,active,0} [ns_server:debug,2014-08-19T16:50:10.167,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588,222, 950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740,612, 246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999,871,688, 560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274,947,819, 764,636,142,998,870,504,376,921,793,738,610,244,116,972,908,844,780,725,478, 414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452, 388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166, 894,711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242, 114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556, 190,918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760, 632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580, 214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839, 656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787, 732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 
290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390, 262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466, 338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180] [views:debug,2014-08-19T16:50:10.199,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/693. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",693,active,0} [ns_server:debug,2014-08-19T16:50:10.274,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 691. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.274,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/691. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.274,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",691,active,0} [ns_server:debug,2014-08-19T16:50:10.276,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,506,378,1000,923,795,740, 612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999,871, 688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274,947, 819,764,636,142,998,870,504,376,921,793,738,610,244,116,972,908,844,780,725, 478,414,350,286,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699, 452,388,324,260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532, 166,894,711,400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608, 242,114,970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684, 556,190,918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815, 760,632,138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708, 580,214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 
1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701, 390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832, 466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180] [views:debug,2014-08-19T16:50:10.308,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/691. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.308,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",691,active,0} [ns_server:debug,2014-08-19T16:50:10.383,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 689. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.383,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/689. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.384,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",689,active,0} [ns_server:debug,2014-08-19T16:50:10.386,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,504,376,921,793,738,610,244,116,972,844,478,350, 959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388,324,260, 1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400, 272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790, 735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787,732,604,238, 
110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887,704,576, 210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963,835, 652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494,366,911,783, 728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987,859, 676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012,935, 807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883,700, 572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414,286] [views:debug,2014-08-19T16:50:10.417,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/689. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.417,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",689,active,0} [ns_server:debug,2014-08-19T16:50:10.492,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 687. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.493,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/687. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.493,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",687,active,0} [ns_server:debug,2014-08-19T16:50:10.494,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,478, 350,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388,324, 260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711, 400,272,1022,945,817,762,634,140,996,868,502,374,919,791,736,608,242,114,970, 842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918, 790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138, 994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019, 
942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787,732,604, 238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987, 859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012, 935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883, 700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414,286] [views:debug,2014-08-19T16:50:10.526,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/687. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.527,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",687,active,0} [ns_server:debug,2014-08-19T16:50:10.664,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 685. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.664,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/685. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.664,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",685,active,0} [ns_server:debug,2014-08-19T16:50:10.666,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,478, 350,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388,324, 260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711, 400,272,1022,945,817,762,634,140,996,868,685,502,374,919,791,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190, 
918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632, 138,994,866,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214, 1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656, 528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787,732, 604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863, 680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939, 811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262, 1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338, 883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414, 286] [views:debug,2014-08-19T16:50:10.731,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/685. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.731,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",685,active,0} [ns_server:debug,2014-08-19T16:50:10.881,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 683. Nacking mccouch update. [views:debug,2014-08-19T16:50:10.881,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/683. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.882,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",683,active,0} [ns_server:debug,2014-08-19T16:50:10.883,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,478, 350,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388,324, 260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711, 
400,272,1022,945,817,762,634,140,996,868,685,502,374,919,791,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190, 918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632, 138,994,866,683,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580, 214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839, 656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,498,370,915,787, 732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390, 262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466, 338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725, 414,286] [views:debug,2014-08-19T16:50:10.957,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/683. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:10.957,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",683,active,0} [ns_server:debug,2014-08-19T16:50:11.124,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 681. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.124,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/681. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.124,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",681,active,0} [ns_server:debug,2014-08-19T16:50:11.125,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,478, 350,959,895,831,712,648,584,520,218,154,1023,946,882,818,763,699,452,388,324, 260,1010,997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711, 400,272,1022,945,817,762,634,140,996,868,685,502,374,919,791,736,608,242,114, 970,842,476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190, 918,790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632, 138,994,866,683,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580, 214,1019,942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839, 656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915, 787,732,604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 1016,939,811,756,628,134,990,862,496,368,913,785,730,602,236,108,964,836,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701, 390,262,1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832, 466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780, 725,414,286] [views:debug,2014-08-19T16:50:11.183,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/681. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.183,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",681,active,0} [ns_server:debug,2014-08-19T16:50:11.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 679. Nacking mccouch update. 
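Annotation: each time a vbucket finishes its takeover, capi_set_view_manager re-dumps its "Usable vbuckets" list, and in this stretch each new dump appears to grow by exactly the vbucket that just went active (699, 697, 695, 693, 691, 689, 687, 685, 683, 681 so far). Rather than eyeballing thousand-entry lists, consecutive dumps can be diffed; a small sketch, again with an assumed log path, that tolerates dumps wrapping across several lines:

#!/usr/bin/env python3
"""Diff consecutive 'Usable vbuckets' dumps to see which vbuckets were added
or dropped between snapshots. Sketch only; the log path is an assumption."""
import re

NUMS = re.compile(r"\d+")

def usable_vbucket_sets(log_path="ns_server.debug.log"):
    sets, buf, capturing = [], [], False
    with open(log_path, errors="replace") as fh:
        for line in fh:
            if "Usable vbuckets:" in line:
                capturing = True
                buf = [line.split("Usable vbuckets:", 1)[1]]
            elif capturing:
                buf.append(line)
            if capturing and "]" in buf[-1]:
                # first ']' after the opening bracket closes the numeric list
                text = "".join(buf).split("]", 1)[0]
                sets.append({int(n) for n in NUMS.findall(text)})
                capturing = False
    return sets

if __name__ == "__main__":
    snaps = usable_vbucket_sets()
    for prev, cur in zip(snaps, snaps[1:]):
        added, dropped = sorted(cur - prev), sorted(prev - cur)
        print(f"+{added} -{dropped} (now {len(cur)} usable)")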
[views:debug,2014-08-19T16:50:11.350,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/679. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.350,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",679,active,0} [ns_server:debug,2014-08-19T16:50:11.351,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,478, 350,895,712,584,218,1023,946,882,818,763,699,452,388,324,260,1010,997,869, 686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987, 859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012, 935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338,883, 700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414,286, 959,831,648,520,154] [views:debug,2014-08-19T16:50:11.433,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/679. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.434,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",679,active,0} [ns_server:debug,2014-08-19T16:50:11.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 677. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.609,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/677. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",677,active,0} [ns_server:debug,2014-08-19T16:50:11.610,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,478, 350,895,712,584,218,1023,946,882,818,763,699,452,388,324,260,1010,997,869, 686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262, 1012,935,807,752,624,130,986,858,492,364,909,781,726,598,232,960,832,466,338, 883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414, 286,959,831,648,520,154] 
[views:debug,2014-08-19T16:50:11.669,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/677. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.669,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",677,active,0} [ns_server:debug,2014-08-19T16:50:11.744,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 675. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.744,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/675. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.744,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",675,active,0} [ns_server:debug,2014-08-19T16:50:11.745,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,490,362,907,779,724,596,230,958,830,464,336,881, 698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284, 957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,488, 360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436, 308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384, 256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,478, 350,895,712,584,218,1023,946,882,818,763,699,452,388,324,260,1010,997,869, 686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476,348, 893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262, 
1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,466, 338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725, 414,286,959,831,648,520,154] [views:debug,2014-08-19T16:50:11.778,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/675. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.778,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",675,active,0} [ns_server:debug,2014-08-19T16:50:11.853,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 673. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.853,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/673. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.853,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",673,active,0} [ns_server:debug,2014-08-19T16:50:11.855,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,673,490,362,907,779,724,596,230,958,830,464,336, 881,698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 488,360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747, 436,308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695, 384,256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226,954, 826,460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902, 774,719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250, 122,978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512, 146,874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771,716, 588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454,326, 999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 478,350,895,712,584,218,1023,946,882,818,763,699,452,388,324,260,1010,997, 869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022, 945,817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735, 424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494, 
366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390, 262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832, 466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780, 725,414,286,959,831,648,520,154] [views:debug,2014-08-19T16:50:11.887,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/673. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.887,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",673,active,0} [ns_server:debug,2014-08-19T16:50:11.962,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 671. Nacking mccouch update. [views:debug,2014-08-19T16:50:11.962,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/671. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.962,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",671,active,0} [ns_server:debug,2014-08-19T16:50:11.964,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,673,490,362,907,779,724,596,230,958,830,464,336, 881,698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802, 747,436,308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878, 695,384,256,1006,929,801,746,618,252,124,980,852,486,358,903,775,720,592,226, 954,826,460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174, 902,774,719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616, 250,122,978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198, 1003,926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640, 512,146,874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899,771, 716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765,454, 326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,478,350,895,712,584,218,1023,946,882,818,763,699,452,388,324,260,1010, 997,869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272, 1022,945,817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842, 476,348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790, 735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019, 942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732, 604,238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863, 680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939, 811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342, 
887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701, 390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960, 832,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908, 780,725,414,286,959,831,648,520,154] [views:debug,2014-08-19T16:50:11.996,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/671. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:11.996,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",671,active,0} [ns_server:debug,2014-08-19T16:50:12.071,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 669. Nacking mccouch update. [views:debug,2014-08-19T16:50:12.071,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/669. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.071,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",669,active,0} [ns_server:debug,2014-08-19T16:50:12.073,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,673,490,362,907,779,724,596,230,958,830,464,336, 881,698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802, 747,436,308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878, 695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540, 174,902,774,719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744, 616,250,122,978,850,484,356,901,773,718,590,224,952,824,458,330,875,692,564, 198,1003,926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823, 640,512,146,874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354,899, 771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302, 975,847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378, 1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820,765, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,476,348,893,710,582, 216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296,969,841, 658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500,372,917, 789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,759,448,320, 993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707,396,268, 1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110,966,838, 472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552,186,914,786, 
731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756,628,134,990, 862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704,576,210,1015, 938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963,835,652,524, 158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728, 600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987,859,676, 548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012,935,807, 752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,466,338,883,700, 572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414,286,959, 831,648,520,154,882,699,388,260,1010] [views:debug,2014-08-19T16:50:12.107,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/669. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.107,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",669,active,0} [ns_server:debug,2014-08-19T16:50:12.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 667. Nacking mccouch update. [views:debug,2014-08-19T16:50:12.268,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/667. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.268,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",667,active,0} [ns_server:debug,2014-08-19T16:50:12.270,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,673,490,362,907,779,724,596,230,958,830,464,336, 881,698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802, 747,436,308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878, 695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540, 174,902,774,719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744, 616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692, 564,198,1003,926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951, 823,640,512,146,874,691,508,380,1002,925,797,742,614,248,120,976,848,482,354, 899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430, 302,975,847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506, 378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948,820, 765,454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896, 768,713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244, 116,972,844,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476,348,893,710, 582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296,969, 841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500,372, 917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,759,448, 320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707,396, 
268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110,966, 838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552,186,914, 786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756,628,134, 990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704,576,210, 1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963,835,652, 524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783, 728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987,859, 676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012,935, 807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,466,338,883, 700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414,286, 959,831,648,520,154,882,699,388,260,1010] [views:debug,2014-08-19T16:50:12.352,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/667. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",667,active,0} [ns_server:debug,2014-08-19T16:50:12.526,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 665. Nacking mccouch update. [views:debug,2014-08-19T16:50:12.527,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/665. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.527,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",665,active,0} [ns_server:debug,2014-08-19T16:50:12.528,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,673,490,362,907,779,724,596,230,958,830,464,336, 881,698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802, 747,436,308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878, 695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540, 174,902,774,719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744, 616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692, 564,198,1003,926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951, 823,640,512,146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482, 354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741, 430,302,975,847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689, 506,378,1000,923,795,740,612,246,118,974,846,480,352,897,769,714,586,220,948, 820,765,454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168, 896,768,713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610, 244,116,972,844,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686, 558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 
372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,759, 448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707, 396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110, 966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552,186, 914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756,628, 134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704,576, 210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963,835, 652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987, 859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012, 935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,466,338, 883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414, 286,959,831,648,520,154,882,699,388,260,1010] [views:debug,2014-08-19T16:50:12.602,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/665. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.602,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",665,active,0} [ns_server:debug,2014-08-19T16:50:12.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 663. Nacking mccouch update. [views:debug,2014-08-19T16:50:12.777,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/663. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.778,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",663,active,0} [ns_server:debug,2014-08-19T16:50:12.779,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,673,490,362,907,779,724,596,230,958,830,464,336, 881,698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802, 747,436,308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878, 695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540, 174,902,774,719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744, 616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692, 564,198,1003,926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951, 823,640,512,146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482, 354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741, 430,302,975,847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689, 506,378,1000,923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220, 948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534, 168,896,768,713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738, 610,244,116,972,844,478,350,895,712,584,218,1023,946,818,763,452,324,997,869, 686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476,348, 
893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262, 1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,466, 338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725, 414,286,959,831,648,520,154,882,699,388,260,1010] [views:debug,2014-08-19T16:50:12.861,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/663. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:12.861,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",663,active,0} [ns_server:debug,2014-08-19T16:50:12.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_448_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_448_'ns_1@10.242.238.91'">>}]}, {move_state,704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_704_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_704_'ns_1@10.242.238.91'">>}]}, {move_state,449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_449_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_449_'ns_1@10.242.238.91'">>}]}, {move_state,705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_705_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_705_'ns_1@10.242.238.91'">>}]}, {move_state,450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_450_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_450_'ns_1@10.242.238.91'">>}]}, {move_state,706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_706_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_706_'ns_1@10.242.238.91'">>}]}, {move_state,960, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_960_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_960_'ns_1@10.242.238.90'">>}]}, {move_state,451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_451_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_451_'ns_1@10.242.238.91'">>}]}, {move_state,707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_707_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_707_'ns_1@10.242.238.91'">>}]}, {move_state,961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_961_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_961_'ns_1@10.242.238.90'">>}]}, {move_state,452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_452_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_452_'ns_1@10.242.238.91'">>}]}, {move_state,708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_708_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_708_'ns_1@10.242.238.91'">>}]}, {move_state,962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_962_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_962_'ns_1@10.242.238.90'">>}]}, {move_state,453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_453_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_453_'ns_1@10.242.238.91'">>}]}, {move_state,709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_709_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_709_'ns_1@10.242.238.91'">>}]}, {move_state,963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_963_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_963_'ns_1@10.242.238.90'">>}]}, {move_state,454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_454_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_454_'ns_1@10.242.238.91'">>}]}, {move_state,710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_710_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_710_'ns_1@10.242.238.91'">>}]}, {move_state,964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_964_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_964_'ns_1@10.242.238.90'">>}]}, {move_state,455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_455_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_455_'ns_1@10.242.238.91'">>}]}, {move_state,711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_711_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_711_'ns_1@10.242.238.91'">>}]}, {move_state,965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_965_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_965_'ns_1@10.242.238.90'">>}]}, {move_state,456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_456_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_456_'ns_1@10.242.238.91'">>}]}, {move_state,712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_712_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_712_'ns_1@10.242.238.91'">>}]}, {move_state,966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_966_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_966_'ns_1@10.242.238.90'">>}]}, {move_state,457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_457_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_457_'ns_1@10.242.238.91'">>}]}, {move_state,713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_713_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_713_'ns_1@10.242.238.91'">>}]}, {move_state,967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_967_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_967_'ns_1@10.242.238.90'">>}]}, {move_state,458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_458_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_458_'ns_1@10.242.238.91'">>}]}, {move_state,714, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_714_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_714_'ns_1@10.242.238.91'">>}]}, {move_state,968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_968_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_968_'ns_1@10.242.238.90'">>}]}, {move_state,459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_459_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_459_'ns_1@10.242.238.91'">>}]}, {move_state,715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_715_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_715_'ns_1@10.242.238.91'">>}]}, {move_state,969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_969_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_969_'ns_1@10.242.238.90'">>}]}, {move_state,460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_460_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_460_'ns_1@10.242.238.91'">>}]}, {move_state,716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_716_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_716_'ns_1@10.242.238.91'">>}]}, {move_state,970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_970_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_970_'ns_1@10.242.238.90'">>}]}, {move_state,461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_461_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_461_'ns_1@10.242.238.91'">>}]}, {move_state,717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_717_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_717_'ns_1@10.242.238.91'">>}]}, {move_state,971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_971_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_971_'ns_1@10.242.238.90'">>}]}, {move_state,462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_462_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_462_'ns_1@10.242.238.91'">>}]}, {move_state,718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_718_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_718_'ns_1@10.242.238.91'">>}]}, {move_state,972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_972_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_972_'ns_1@10.242.238.90'">>}]}, {move_state,463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_463_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_463_'ns_1@10.242.238.91'">>}]}, {move_state,719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_719_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_719_'ns_1@10.242.238.91'">>}]}, {move_state,973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_973_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_973_'ns_1@10.242.238.90'">>}]}, {move_state,464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_464_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_464_'ns_1@10.242.238.91'">>}]}, {move_state,720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_720_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_720_'ns_1@10.242.238.91'">>}]}, {move_state,974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_974_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_974_'ns_1@10.242.238.90'">>}]}, {move_state,465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_465_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_465_'ns_1@10.242.238.91'">>}]}, {move_state,721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_721_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_721_'ns_1@10.242.238.91'">>}]}, {move_state,975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_975_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_975_'ns_1@10.242.238.90'">>}]}, {move_state,466, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_466_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_466_'ns_1@10.242.238.91'">>}]}, {move_state,722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_722_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_722_'ns_1@10.242.238.91'">>}]}, {move_state,976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_976_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_976_'ns_1@10.242.238.90'">>}]}, {move_state,467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_467_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_467_'ns_1@10.242.238.91'">>}]}, {move_state,723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_723_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_723_'ns_1@10.242.238.91'">>}]}, {move_state,977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_977_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_977_'ns_1@10.242.238.90'">>}]}, {move_state,468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_468_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_468_'ns_1@10.242.238.91'">>}]}, {move_state,724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_724_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_724_'ns_1@10.242.238.91'">>}]}, {move_state,978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_978_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_978_'ns_1@10.242.238.90'">>}]}, {move_state,469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_469_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_469_'ns_1@10.242.238.91'">>}]}, {move_state,725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_725_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_725_'ns_1@10.242.238.91'">>}]}, {move_state,979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_979_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_979_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:50:12.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 448, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 704, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 449, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 705, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 450, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 706, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 960, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 451, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 707, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 961, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 452, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 708, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 962, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 453, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 709, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 963, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 454, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 710, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:12.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 964, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 455, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 711, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 965, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 456, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 712, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 966, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 457, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 713, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 967, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 458, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 714, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 968, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 459, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 715, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 969, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 460, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 716, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 970, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:50:12.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 461, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 717, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 971, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 462, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 718, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 972, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 463, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 719, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 973, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 464, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 720, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 974, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 465, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 721, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 975, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 466, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 722, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 976, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 467, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
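The run of `Got update_stats` entries above (it continues for a few more vbuckets just below) always has the same shape: a vbucket id followed by `{node, items-remaining}` pairs for the replicas being built on each destination node. When scanning a long stretch of these, a small throwaway parser can be easier than reading them by eye. The sketch below is a hypothetical log-reading helper, not ns_server code; the regexes and the name `summarize_update_stats` are assumptions for illustration only.

```python
import re
from collections import defaultdict

# Matches entries such as:
#   Got update_stats: 448, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}]
STATS_RE = re.compile(r"Got update_stats: (\d+), \[(.*?)\]")
PAIR_RE = re.compile(r"\{'([^']+)',(\d+)\}")

def summarize_update_stats(log_text):
    """Count how many distinct vbuckets mention each destination node and
    collect any entry whose remaining-items counter is non-zero."""
    per_node = defaultdict(set)
    nonzero = []
    for vb, pairs in STATS_RE.findall(log_text):
        for node, remaining in PAIR_RE.findall(pairs):
            per_node[node].add(int(vb))
            if int(remaining):
                nonzero.append((int(vb), node, int(remaining)))
    return {node: len(vbs) for node, vbs in per_node.items()}, nonzero
```

Fed the block above, it simply confirms that every remaining-items counter shown here is zero and reports how many of the logged vbuckets involve each of the .89/.90/.91 destination nodes.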
[ns_server:debug,2014-08-19T16:50:12.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 723, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 977, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 468, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 724, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 978, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 469, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 725, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 979, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:13.045,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 661. Nacking mccouch update. [views:debug,2014-08-19T16:50:13.045,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/661. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.045,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",661,active,0} [ns_server:debug,2014-08-19T16:50:13.047,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,805,750,622,128,984,856,673,490,362,907,779,724,596,230,958,830,464,336, 881,698,570,204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802, 747,436,308,981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878, 695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540, 174,902,774,719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744, 616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692, 564,198,1003,926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951, 823,640,512,146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482, 354,899,771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741, 430,302,975,847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689, 506,378,1000,923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220, 948,820,765,454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534, 168,896,768,713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738, 610,244,116,972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997, 869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022, 945,817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,476, 348,893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735, 424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942, 814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390, 262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832, 466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780, 725,414,286,959,831,648,520,154,882,699,388,260,1010] [views:debug,2014-08-19T16:50:13.129,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/661. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.129,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",661,active,0} [ns_server:debug,2014-08-19T16:50:13.298,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 659. Nacking mccouch update. 
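From this point the log settles into a repeating per-vbucket cycle: mc_connection logs that it added the `_local/vbuuid` document and is nacking the mccouch update, the views subsystem logs the `set_vbucket ... active` event, a `Signaled mc_couch_event` line follows, and capi_set_view_manager prints a fresh `Usable vbuckets` list. The cycle is just starting again for vbucket 659 and repeats below for 657, 655, 653, and so on, with the nack entries spaced roughly 100-300 ms apart. A hypothetical helper for pulling that timeline out of a chunk of this log is sketched below; the name `activation_timeline` and the regex are illustrative assumptions, not part of the server.

```python
import re
from datetime import datetime

# Matches the header and message of entries such as:
#   [ns_server:debug,2014-08-19T16:50:13.298,...:mc_connection:do_notify_vbucket_update:110]
#   Added _local/vbuuid document into vb: 659. Nacking mccouch update.
NACK_RE = re.compile(
    r"\[ns_server:debug,([^,]+),[^\]]*\]"
    r"Added _local/vbuuid document into vb: (\d+)\.\s*Nacking mccouch update\."
)

def activation_timeline(log_text):
    """Return [(vbucket, timestamp, ms_since_previous)] for every
    'Nacking mccouch update' entry in the given chunk of log text."""
    events = [(int(vb), datetime.fromisoformat(stamp))
              for stamp, vb in NACK_RE.findall(log_text)]
    timeline, prev = [], None
    for vb, when in events:
        gap_ms = None if prev is None else (when - prev).total_seconds() * 1000
        timeline.append((vb, when, gap_ms))
        prev = when
    return timeline
```

Over the entries in this stretch it would list 661, 659, 657, ... with gaps of roughly 0.1-0.3 s, matching the timestamps visible in the raw lines.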
[views:debug,2014-08-19T16:50:13.298,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/659. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.298,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",659,active,0} [ns_server:debug,2014-08-19T16:50:13.300,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,984,856,673,490,362,907,779,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829, 646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512, 146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771, 716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,474,346,891,708,580,214,1019,942,814,759, 448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707, 396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110, 966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552,186, 914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756,628, 134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704,576, 210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963,835, 652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366,911, 783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314,987, 859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262,1012, 935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,466,338, 883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725,414, 286,959,831,648,520,154,882,699,388,260,1010,805,750,128] [views:debug,2014-08-19T16:50:13.332,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/659. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.332,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",659,active,0} [ns_server:debug,2014-08-19T16:50:13.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 657. Nacking mccouch update. [views:debug,2014-08-19T16:50:13.424,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/657. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",657,active,0} [ns_server:debug,2014-08-19T16:50:13.425,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,984,856,673,490,362,907,779,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829, 646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512, 146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771, 716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366, 911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262, 1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,466, 338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780,725, 414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128] 
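Each `Usable vbuckets` dump in this stretch appears to differ from the previous one only by the vbucket that has just gone active (661, then 659, then 657 above; 655, 653, ... below), plus some reordering of the tail of the list. Diffing consecutive dumps is usually quicker than eyeballing thousand-entry lists; a minimal, hypothetical sketch of that follows (the name `diff_usable_vbuckets` and the parsing regex are assumptions, not part of the server).

```python
import re

# Captures the bracketed id list that follows each "Usable vbuckets:" header.
USABLE_RE = re.compile(r"Usable vbuckets:\s*\[([0-9,\s]*)\]")

def diff_usable_vbuckets(log_text):
    """Yield (added, removed) sets of vbucket ids between consecutive
    'Usable vbuckets' dumps found in a chunk of log text."""
    snapshots = [
        {int(tok) for tok in re.split(r"[\s,]+", body) if tok}
        for body in USABLE_RE.findall(log_text)
    ]
    for prev, cur in zip(snapshots, snapshots[1:]):
        yield cur - prev, prev - cur
```

Run over this section it should yield a single added id per step ({659}, {657}, {655}, ...) and nothing removed, which is what the dumps shown here suggest: the same set growing by one vbucket at a time.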
[views:debug,2014-08-19T16:50:13.458,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/657. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.458,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",657,active,0} [ns_server:debug,2014-08-19T16:50:13.599,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 655. Nacking mccouch update. [views:debug,2014-08-19T16:50:13.599,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/655. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.600,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",655,active,0} [ns_server:debug,2014-08-19T16:50:13.601,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,984,856,673,490,362,907,779,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829, 646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512, 146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771, 716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494, 366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390, 
262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832, 466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780, 725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128] [views:debug,2014-08-19T16:50:13.633,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/655. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.634,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",655,active,0} [ns_server:debug,2014-08-19T16:50:13.733,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 653. Nacking mccouch update. [views:debug,2014-08-19T16:50:13.734,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/653. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.734,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",653,active,0} [ns_server:debug,2014-08-19T16:50:13.735,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,984,856,673,490,362,907,779,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829, 646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512, 146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771, 716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 
290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677, 494,366,911,783,728,600,234,962,834,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701, 390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960, 832,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908, 780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128] [views:debug,2014-08-19T16:50:13.767,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/653. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.768,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",653,active,0} [ns_server:debug,2014-08-19T16:50:13.868,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 651. Nacking mccouch update. [views:debug,2014-08-19T16:50:13.868,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/651. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.868,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",651,active,0} [ns_server:debug,2014-08-19T16:50:13.869,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,984,856,673,490,362,907,779,724,596,230,958,830,464,336,881,698,570, 204,1009,932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829, 646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360, 905,777,722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308, 981,853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512, 146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771, 716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 
552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677, 494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808, 753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884, 701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232, 960,832,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180, 908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128] [views:debug,2014-08-19T16:50:13.902,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/651. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:13.903,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",651,active,0} [ns_server:debug,2014-08-19T16:50:14.079,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 649. Nacking mccouch update. [views:debug,2014-08-19T16:50:14.080,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/649. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.080,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",649,active,0} [ns_server:debug,2014-08-19T16:50:14.081,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,907,779,724,596,230,958,830,464,336,881,698,570,204,1009,932, 804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518,152, 880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722, 594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670, 542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801, 746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,460,332,877, 694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280, 953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667, 484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798,743, 432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691, 508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222, 950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,454,326,999, 871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558,192,920,792, 737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634,140,996, 868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710,582,216, 1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296,969,841,658, 530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500,372,917,789, 734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814,759,448,320, 
993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707,396,268, 1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110,966,838, 655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552,186,914, 786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756,628,134, 990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576, 210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963,835, 652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366,911, 783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,442,314, 987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390,262, 1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649, 466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908,780, 725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362] [views:debug,2014-08-19T16:50:14.163,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/649. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.164,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",649,active,0} [ns_server:debug,2014-08-19T16:50:14.348,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 647. Nacking mccouch update. [views:debug,2014-08-19T16:50:14.348,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/647. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.348,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",647,active,0} [ns_server:debug,2014-08-19T16:50:14.350,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,907,779,724,596,230,958,830,647,464,336,881,698,570,204,1009, 932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518, 152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777, 722,594,228,956,828,462,334,879,696,568,202,1007,930,802,747,436,308,981,853, 670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929, 801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,460,332, 877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408, 280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850, 667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926,798, 743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,454,326, 999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558,192,920, 792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634,140, 996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710,582, 
216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296,969,841, 658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500,372,917, 789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814,759,448, 320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707,396, 268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110,966, 838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552,186, 914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756,628, 134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342,887,704, 576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290,963, 835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494,366, 911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,442, 314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701,390, 262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832, 649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180,908, 780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673, 362] [views:debug,2014-08-19T16:50:14.432,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/647. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.432,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",647,active,0} [ns_server:debug,2014-08-19T16:50:14.582,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 645. Nacking mccouch update. [views:debug,2014-08-19T16:50:14.582,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/645. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.582,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",645,active,0} [ns_server:debug,2014-08-19T16:50:14.584,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,907,779,724,596,230,958,830,647,464,336,881,698,570,204,1009, 932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518, 152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777, 722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006, 929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,460, 332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719, 408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978, 850,667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003,926, 798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146, 874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716, 588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,454, 326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296,969, 841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500,372, 917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814,759, 448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707, 396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110, 966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494, 366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753, 442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701, 390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960, 832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180, 908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984, 673,362] [views:debug,2014-08-19T16:50:14.658,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/645. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.658,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",645,active,0} [ns_server:debug,2014-08-19T16:50:14.808,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 643. 
Nacking mccouch update. [views:debug,2014-08-19T16:50:14.808,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/643. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.808,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",643,active,0} [ns_server:debug,2014-08-19T16:50:14.810,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,907,779,724,596,230,958,830,647,464,336,881,698,570,204,1009, 932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518, 152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777, 722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006, 929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,667,484,356,901,773,718,590,224,952,824,458,330,875,692,564,198,1003, 926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512, 146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771, 716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342, 887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677, 494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808, 753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884, 701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232, 960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546, 180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128, 984,673,362] [views:debug,2014-08-19T16:50:14.884,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/643. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.884,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",643,active,0} [ns_server:debug,2014-08-19T16:50:14.993,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 641. Nacking mccouch update. [views:debug,2014-08-19T16:50:14.993,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/641. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:14.994,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",641,active,0} [ns_server:debug,2014-08-19T16:50:14.995,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,907,779,724,596,230,958,830,647,464,336,881,698,570,204,1009, 932,804,749,438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518, 152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777, 722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981, 853,670,542,176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006, 929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643, 460,332,877,694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774, 719,408,280,953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122, 978,850,667,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198, 1003,926,798,743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640, 512,146,874,691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899, 771,716,588,222,950,822,767,456,328,873,690,562,196,1001,924,796,741,430,302, 975,847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378, 1000,923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820, 765,454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896, 768,713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244, 116,972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686, 558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942, 814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863, 680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939, 811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860, 677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936, 808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156, 884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598, 232,960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674, 
546,180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750, 128,984,673,362] [views:debug,2014-08-19T16:50:15.027,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/641. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.027,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",641,active,0} [ns_server:debug,2014-08-19T16:50:15.102,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 639. Nacking mccouch update. [views:debug,2014-08-19T16:50:15.102,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/639. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.102,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",639,active,0} [ns_server:debug,2014-08-19T16:50:15.104,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,830,647,464,336,881,698,570,204,1009,932,804,749, 438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697, 386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228, 956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542, 176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746, 618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877, 694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280, 953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667, 484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798, 743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,454, 326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296,969, 841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500,372, 917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814,759, 448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890,707, 396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238,110, 966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680,552, 186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811,756, 628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342,887, 704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418,290, 963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677,494, 366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753, 
442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884,701, 390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960, 832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546,180, 908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984, 673,362,907,596,230] [views:debug,2014-08-19T16:50:15.136,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/639. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.136,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",639,active,0} [ns_server:debug,2014-08-19T16:50:15.211,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 637. Nacking mccouch update. [views:debug,2014-08-19T16:50:15.211,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/637. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.212,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",637,active,0} [ns_server:debug,2014-08-19T16:50:15.213,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,830,647,464,336,881,698,570,204,1009,932,804,749, 438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697, 386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228, 956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542, 176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746, 618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877, 694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280, 953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667, 484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798, 743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162,890, 707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604,238, 110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863,680, 552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939,811, 756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470,342, 
887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729,418, 290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860,677, 494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808, 753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156,884, 701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232, 960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674,546, 180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128, 984,673,362,907,596,230] [views:debug,2014-08-19T16:50:15.245,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/637. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",637,active,0} [ns_server:debug,2014-08-19T16:50:15.429,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 635. Nacking mccouch update. [views:debug,2014-08-19T16:50:15.429,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/635. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.429,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",635,active,0} [ns_server:debug,2014-08-19T16:50:15.431,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,830,647,464,336,881,698,570,204,1009,932,804,749, 438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697, 386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228, 956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542, 176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746, 618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877, 694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280, 953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667, 484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798, 743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686, 558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942, 814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 
238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991,863, 680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016,939, 811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653,470, 342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784,729, 418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988,860, 677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936, 808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522,156, 884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598, 232,960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857,674, 546,180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805,750, 128,984,673,362,907,596,230] [views:debug,2014-08-19T16:50:15.488,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/635. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.488,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",635,active,0} [ns_server:debug,2014-08-19T16:50:15.604,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 633. Nacking mccouch update. [views:debug,2014-08-19T16:50:15.605,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/633. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.605,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",633,active,0} [ns_server:debug,2014-08-19T16:50:15.606,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,830,647,464,336,881,698,570,204,1009,932,804,749, 438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697, 386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228, 956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542, 176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746, 618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877, 694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280, 953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667, 484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798, 743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686, 558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 
683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732, 604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653, 470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784, 729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988, 860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013, 936,808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522, 156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726, 598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857, 674,546,180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805, 750,128,984,673,362,907,596,230] [views:debug,2014-08-19T16:50:15.659,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/633. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.659,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",633,active,0} [ns_server:debug,2014-08-19T16:50:15.734,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 631. Nacking mccouch update. [views:debug,2014-08-19T16:50:15.734,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/631. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.734,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",631,active,0} [ns_server:debug,2014-08-19T16:50:15.736,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,830,647,464,336,881,698,570,204,1009,932,804,749, 438,310,983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697, 386,258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228, 956,828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542, 176,904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746, 618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877, 694,566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280, 953,825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667, 484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798, 743,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686, 558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 
762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656, 528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787, 732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836, 653,470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912, 784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132, 988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208, 1013,936,808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650, 522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781, 726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985, 857,674,546,180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010, 805,750,128,984,673,362,907,596,230] [views:debug,2014-08-19T16:50:15.768,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/631. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.768,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",631,active,0} [ns_server:debug,2014-08-19T16:50:15.860,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 629. Nacking mccouch update. [views:debug,2014-08-19T16:50:15.860,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/629. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.860,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",629,active,0} [ns_server:debug,2014-08-19T16:50:15.862,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,698,570,204,1009,932,804,749,438,310, 983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258, 1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828, 645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280,953,825, 642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356, 901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508, 380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653, 470,342,887,704,576,210,1015,938,810,755,444,316,989,861,678,550,184,912,784, 729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132,988, 860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013, 936,808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650,522, 156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726, 598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985,857, 674,546,180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010,805, 750,128,984,673,362,907,596,230,830,464] [views:debug,2014-08-19T16:50:15.911,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/629. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.911,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",629,active,0} [ns_server:debug,2014-08-19T16:50:15.986,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 627. Nacking mccouch update. [views:debug,2014-08-19T16:50:15.986,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/627. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:15.986,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",627,active,0} [ns_server:debug,2014-08-19T16:50:15.988,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,698,570,204,1009,932,804,749,438,310, 983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258, 1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828, 645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280,953,825, 642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356, 901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508, 380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912, 784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132, 988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208, 1013,936,808,753,442,314,987,859,676,548,182,910,782,727,416,288,961,833,650, 522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781, 726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312,985, 
857,674,546,180,908,780,725,414,286,959,831,648,520,154,882,699,388,260,1010, 805,750,128,984,673,362,907,596,230,830,464] [views:debug,2014-08-19T16:50:16.020,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/627. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.020,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",627,active,0} [rebalance:info,2014-08-19T16:50:16.037,ns_1@10.242.238.88:<0.1624.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 466 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.037,ns_1@10.242.238.88:<0.1701.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 465 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.038,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 466 state to active [rebalance:info,2014-08-19T16:50:16.039,ns_1@10.242.238.88:<0.1624.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 466 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.039,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 465 state to active [rebalance:info,2014-08-19T16:50:16.040,ns_1@10.242.238.88:<0.1701.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 465 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.041,ns_1@10.242.238.88:<0.1624.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.041,ns_1@10.242.238.88:<0.1701.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:16.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 625. Nacking mccouch update. [views:debug,2014-08-19T16:50:16.112,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/625. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",625,active,0} [ns_server:debug,2014-08-19T16:50:16.114,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,698,570,204,1009,932,804,749,438,310, 983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258, 1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828, 645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280,953,825, 642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356, 901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508, 380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912, 784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132, 988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208, 1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416,288,961,833, 650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909, 781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,440,312, 985,857,674,546,180,908,780,725,414,286,959,831,648,520,154,882,699,388,260, 1010,805,750,128,984,673,362,907,596,230,830,464] [views:debug,2014-08-19T16:50:16.171,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/625. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.171,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",625,active,0} [rebalance:info,2014-08-19T16:50:16.174,ns_1@10.242.238.88:<0.1484.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 468 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.174,ns_1@10.242.238.88:<0.1547.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 467 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.175,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 468 state to active [rebalance:info,2014-08-19T16:50:16.176,ns_1@10.242.238.88:<0.1484.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 468 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.176,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 467 state to active [rebalance:info,2014-08-19T16:50:16.177,ns_1@10.242.238.88:<0.1547.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 467 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.178,ns_1@10.242.238.88:<0.1484.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.178,ns_1@10.242.238.88:<0.1547.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.288,ns_1@10.242.238.88:<0.1936.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 718 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.288,ns_1@10.242.238.88:<0.1407.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 469 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.288,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 718 state to active [rebalance:info,2014-08-19T16:50:16.289,ns_1@10.242.238.88:<0.1936.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 718 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.290,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 469 state to active [rebalance:info,2014-08-19T16:50:16.291,ns_1@10.242.238.88:<0.1407.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 469 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.291,ns_1@10.242.238.88:<0.1936.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.291,ns_1@10.242.238.88:<0.1407.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:16.322,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 623. Nacking mccouch update. [views:debug,2014-08-19T16:50:16.322,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/623. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.322,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",623,active,0} [ns_server:debug,2014-08-19T16:50:16.324,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,698,570,204,1009,932,804,749,438,310, 983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258, 1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828, 645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280,953,825, 642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356, 901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508, 380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912, 784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132, 988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208, 1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416,288,961,833, 650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909, 781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440, 312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154,882,699,388, 260,1010,805,750,128,984,673,362,907,596,230,830,464] [views:debug,2014-08-19T16:50:16.372,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/623. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.372,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",623,active,0} [rebalance:info,2014-08-19T16:50:16.408,ns_1@10.242.238.88:<0.1760.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 720 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.408,ns_1@10.242.238.88:<0.1858.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 719 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.409,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 720 state to active [rebalance:info,2014-08-19T16:50:16.410,ns_1@10.242.238.88:<0.1760.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 720 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.411,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 719 state to active [rebalance:info,2014-08-19T16:50:16.411,ns_1@10.242.238.88:<0.1858.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 719 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.412,ns_1@10.242.238.88:<0.1760.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.412,ns_1@10.242.238.88:<0.1858.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:16.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 621. Nacking mccouch update. [views:debug,2014-08-19T16:50:16.474,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/621. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",621,active,0} [ns_server:debug,2014-08-19T16:50:16.476,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,698,570,204,1009,932,804,749,621,438, 310,983,855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386, 258,1008,931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956, 828,645,462,334,879,696,568,202,1007,930,802,747,436,308,981,853,670,542,176, 904,776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618, 252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694, 566,200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280,953, 825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484, 356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743, 432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691, 508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222, 950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942, 814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732, 604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836, 653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184, 912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626, 132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574, 208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416,288,961, 833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364, 909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623, 440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154,882,699, 388,260,1010,805,750,128,984,673,362,907,596,230,830,464] [rebalance:info,2014-08-19T16:50:16.492,ns_1@10.242.238.88:<0.1603.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 722 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.492,ns_1@10.242.238.88:<0.1680.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 721 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.493,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 722 
state to active [rebalance:info,2014-08-19T16:50:16.493,ns_1@10.242.238.88:<0.1603.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 722 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.494,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 721 state to active [rebalance:info,2014-08-19T16:50:16.495,ns_1@10.242.238.88:<0.1680.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 721 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.495,ns_1@10.242.238.88:<0.1603.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.496,ns_1@10.242.238.88:<0.1680.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:16.525,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/621. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.525,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",621,active,0} [rebalance:info,2014-08-19T16:50:16.593,ns_1@10.242.238.88:<0.1449.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 724 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.593,ns_1@10.242.238.88:<0.1526.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 723 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.593,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 724 state to active [rebalance:info,2014-08-19T16:50:16.594,ns_1@10.242.238.88:<0.1449.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 724 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.594,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 723 state to active [rebalance:info,2014-08-19T16:50:16.595,ns_1@10.242.238.88:<0.1526.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 723 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.596,ns_1@10.242.238.88:<0.1449.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.596,ns_1@10.242.238.88:<0.1526.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.676,ns_1@10.242.238.88:<0.1371.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 725 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.676,ns_1@10.242.238.88:<0.1915.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 972 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.677,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 725 state to active [rebalance:info,2014-08-19T16:50:16.678,ns_1@10.242.238.88:<0.1371.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 725 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.678,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 972 
state to active [rebalance:info,2014-08-19T16:50:16.679,ns_1@10.242.238.88:<0.1915.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 972 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.679,ns_1@10.242.238.88:<0.1371.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.680,ns_1@10.242.238.88:<0.1915.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:16.683,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 619. Nacking mccouch update. [views:debug,2014-08-19T16:50:16.683,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/619. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.683,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",619,active,0} [ns_server:debug,2014-08-19T16:50:16.685,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,932,804,749,621,438,310,983, 855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008, 931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,434,306,979,851,668,540,174,902,774,719,408,280,953,825, 642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356, 901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,432, 304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508, 380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528,162, 890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732,604, 238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318,991, 863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266,1016, 939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836,653, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912, 784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626,132, 
988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208, 1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416,288,961,833, 650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364,909, 781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440, 312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154,882,699,388, 260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009] [views:debug,2014-08-19T16:50:16.733,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/619. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.734,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",619,active,0} [rebalance:info,2014-08-19T16:50:16.744,ns_1@10.242.238.88:<0.1730.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 974 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.744,ns_1@10.242.238.88:<0.1823.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 973 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.744,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 974 state to active [rebalance:info,2014-08-19T16:50:16.745,ns_1@10.242.238.88:<0.1730.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 974 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.745,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 973 state to active [rebalance:info,2014-08-19T16:50:16.746,ns_1@10.242.238.88:<0.1823.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 973 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.747,ns_1@10.242.238.88:<0.1730.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.747,ns_1@10.242.238.88:<0.1823.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.828,ns_1@10.242.238.88:<0.1582.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 976 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.828,ns_1@10.242.238.88:<0.1653.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 975 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.828,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 976 state to active [rebalance:info,2014-08-19T16:50:16.829,ns_1@10.242.238.88:<0.1582.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 976 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.830,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 975 state to active [rebalance:info,2014-08-19T16:50:16.830,ns_1@10.242.238.88:<0.1653.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 975 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.831,ns_1@10.242.238.88:<0.1582.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[rebalance:info,2014-08-19T16:50:16.831,ns_1@10.242.238.88:<0.1653.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:16.909,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 617. Nacking mccouch update. [views:debug,2014-08-19T16:50:16.909,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/617. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.909,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",617,active,0} [ns_server:debug,2014-08-19T16:50:16.910,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,932,804,749,621,438,310,983, 855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008, 931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953, 825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484, 356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743, 432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691, 508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222, 950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847,664, 536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795, 740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942, 814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732, 604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318, 991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394,266, 1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964,836, 653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184, 912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754,626, 132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574, 208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416,288,961, 833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492,364, 909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623, 440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154,882,699, 
388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009] [rebalance:info,2014-08-19T16:50:16.920,ns_1@10.242.238.88:<0.1428.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 978 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:16.920,ns_1@10.242.238.88:<0.1505.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 977 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:16.920,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 978 state to active [rebalance:info,2014-08-19T16:50:16.921,ns_1@10.242.238.88:<0.1428.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 978 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:16.921,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 977 state to active [rebalance:info,2014-08-19T16:50:16.922,ns_1@10.242.238.88:<0.1505.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 977 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:16.923,ns_1@10.242.238.88:<0.1428.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:16.923,ns_1@10.242.238.88:<0.1505.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:16.993,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/617. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:16.993,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",617,active,0} [rebalance:info,2014-08-19T16:50:17.037,ns_1@10.242.238.88:<0.1350.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 979 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.037,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 979 state to active [rebalance:info,2014-08-19T16:50:17.038,ns_1@10.242.238.88:<0.1350.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 979 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.038,ns_1@10.242.238.88:<0.1350.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.105,ns_1@10.242.238.88:<0.3063.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 448 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.105,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 448 state to active [rebalance:info,2014-08-19T16:50:17.106,ns_1@10.242.238.88:<0.3063.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 448 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.107,ns_1@10.242.238.88:<0.3063.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:17.162,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 615. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:17.162,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/615. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.162,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",615,active,0} [ns_server:debug,2014-08-19T16:50:17.164,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,932,804,749,621,438,310,983, 855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008, 931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953, 825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484, 356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,430,302,975,847, 664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923, 795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637, 454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896,768, 713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116, 972,844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686, 558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656, 528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787, 732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446, 318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394, 266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964, 836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 184,912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754, 626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416,288, 961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492, 364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751, 623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154,882, 699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009] [rebalance:info,2014-08-19T16:50:17.172,ns_1@10.242.238.88:<0.2965.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 450 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.172,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 450 state 
to active [rebalance:info,2014-08-19T16:50:17.173,ns_1@10.242.238.88:<0.2965.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 450 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.174,ns_1@10.242.238.88:<0.2965.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:17.205,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/615. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.205,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",615,active,0} [rebalance:info,2014-08-19T16:50:17.231,ns_1@10.242.238.88:<0.3021.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 449 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:17.231,ns_1@10.242.238.88:<0.2806.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 452 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.231,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 449 state to active [rebalance:info,2014-08-19T16:50:17.232,ns_1@10.242.238.88:<0.3021.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 449 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:17.232,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 452 state to active [rebalance:info,2014-08-19T16:50:17.234,ns_1@10.242.238.88:<0.2806.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 452 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.234,ns_1@10.242.238.88:<0.3021.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.235,ns_1@10.242.238.88:<0.2806.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:17.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 613. Nacking mccouch update. [views:debug,2014-08-19T16:50:17.280,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/613. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",613,active,0} [ns_server:debug,2014-08-19T16:50:17.282,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,932,804,749,621,438,310,983, 855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008, 931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953, 825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484, 356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 637,454,326,999,871,688,560,194,922,794,739,428,300,973,845,662,534,168,896, 768,713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244, 116,972,844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869, 686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476, 348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790, 735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839, 656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915, 787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629, 446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705, 394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108, 964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678, 550,184,912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809, 754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416, 288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675, 492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806, 751,623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154, 882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009] [views:debug,2014-08-19T16:50:17.314,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/613. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.314,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",613,active,0} [rebalance:info,2014-08-19T16:50:17.364,ns_1@10.242.238.88:<0.2883.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 451 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:17.364,ns_1@10.242.238.88:<0.2637.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 454 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.365,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 451 state to active [rebalance:info,2014-08-19T16:50:17.366,ns_1@10.242.238.88:<0.2883.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 451 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:17.366,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 454 state to active [rebalance:info,2014-08-19T16:50:17.367,ns_1@10.242.238.88:<0.2637.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 454 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.367,ns_1@10.242.238.88:<0.2883.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.368,ns_1@10.242.238.88:<0.2637.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:17.389,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 611. Nacking mccouch update. [views:debug,2014-08-19T16:50:17.389,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/611. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.389,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",611,active,0} [ns_server:debug,2014-08-19T16:50:17.390,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,932,804,749,621,438,310,983, 855,672,544,178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008, 931,803,748,620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,410,282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252, 124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566, 200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953, 825,642,514,148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484, 356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874, 691,508,380,1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975, 847,664,536,170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000, 923,795,740,612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765, 637,454,326,999,871,688,560,194,922,794,739,611,428,300,973,845,662,534,168, 896,768,713,402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610, 244,116,972,844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997, 869,686,558,192,920,792,737,426,298,971,843,660,532,166,894,711,400,272,1022, 945,817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659, 476,348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918, 790,735,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138, 994,866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370, 915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757, 629,446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888, 705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236, 108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861, 678,550,184,912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937, 809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340, 885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727, 416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858, 675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934, 806,751,623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520, 154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698, 1009] [views:debug,2014-08-19T16:50:17.423,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/611. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.423,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",611,active,0} [rebalance:info,2014-08-19T16:50:17.498,ns_1@10.242.238.88:<0.2483.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 456 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:17.498,ns_1@10.242.238.88:<0.2715.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 453 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.498,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 456 state to active [rebalance:info,2014-08-19T16:50:17.499,ns_1@10.242.238.88:<0.2483.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 456 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:17.500,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 453 state to active [rebalance:info,2014-08-19T16:50:17.501,ns_1@10.242.238.88:<0.2715.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 453 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.501,ns_1@10.242.238.88:<0.2483.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.501,ns_1@10.242.238.88:<0.2715.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:17.524,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 609. Nacking mccouch update. [views:debug,2014-08-19T16:50:17.524,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/609. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.524,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",609,active,0} [ns_server:debug,2014-08-19T16:50:17.526,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,855,672,544, 178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748, 620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879, 696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410, 282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852, 669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928, 800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514, 148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773, 718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380, 1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839,656, 528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787, 732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446, 318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705,394, 266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964, 836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 184,912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809,754, 626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416,288, 961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675,492, 364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751, 623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154,882, 699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932, 621,310] [views:debug,2014-08-19T16:50:17.584,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/609. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.584,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",609,active,0} [rebalance:info,2014-08-19T16:50:17.615,ns_1@10.242.238.88:<0.2288.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 458 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:17.615,ns_1@10.242.238.88:<0.2560.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 455 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.616,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 458 state to active [rebalance:info,2014-08-19T16:50:17.617,ns_1@10.242.238.88:<0.2288.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 458 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:17.617,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 455 state to active [rebalance:info,2014-08-19T16:50:17.618,ns_1@10.242.238.88:<0.2560.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 455 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.618,ns_1@10.242.238.88:<0.2288.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.619,ns_1@10.242.238.88:<0.2560.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:17.632,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:50:17.658,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 607. Nacking mccouch update. [views:debug,2014-08-19T16:50:17.658,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/607. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.658,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",607,active,0} [ns_server:debug,2014-08-19T16:50:17.660,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,855,672,544, 178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748, 620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879, 696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410, 282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852, 669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928, 800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514, 148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773, 718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380, 1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,422,294,967,839, 656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915, 787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629, 446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888,705, 394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108, 964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678, 550,184,912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937,809, 754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,416, 288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858,675, 492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806, 751,623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520,154, 882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009, 932,621,310] [rebalance:info,2014-08-19T16:50:17.716,ns_1@10.242.238.88:<0.2365.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 457 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:17.716,ns_1@10.242.238.88:<0.2134.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 460 state change: {'ns_1@10.242.238.88',active,paused,undefined} 
[ns_server:info,2014-08-19T16:50:17.716,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 457 state to active [views:debug,2014-08-19T16:50:17.717,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/607. Updated state: active (0) [rebalance:info,2014-08-19T16:50:17.717,ns_1@10.242.238.88:<0.2365.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 457 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:17.718,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",607,active,0} [ns_server:info,2014-08-19T16:50:17.718,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 460 state to active [rebalance:info,2014-08-19T16:50:17.721,ns_1@10.242.238.88:<0.2134.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 460 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.721,ns_1@10.242.238.88:<0.2365.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.721,ns_1@10.242.238.88:<0.2134.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.834,ns_1@10.242.238.88:<0.2219.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 459 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:17.834,ns_1@10.242.238.88:<0.1957.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 462 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:17.835,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 459 state to active [rebalance:info,2014-08-19T16:50:17.836,ns_1@10.242.238.88:<0.2219.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 459 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:17.836,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 462 state to active [rebalance:info,2014-08-19T16:50:17.837,ns_1@10.242.238.88:<0.1957.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 462 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.837,ns_1@10.242.238.88:<0.2219.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:17.838,ns_1@10.242.238.88:<0.1957.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:17.885,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 605. Nacking mccouch update. [views:debug,2014-08-19T16:50:17.885,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/605. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.885,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",605,active,0} [ns_server:debug,2014-08-19T16:50:17.887,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,855,672,544, 178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748, 620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879, 696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410, 282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852, 669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928, 800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514, 148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773, 718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380, 1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370, 915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757, 629,446,318,991,863,680,552,186,914,786,731,420,292,965,837,654,526,160,888, 705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236, 108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861, 678,550,184,912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014,937, 809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340, 885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727, 416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858, 675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934, 806,751,623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520, 154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698, 1009,932,621,310] [views:debug,2014-08-19T16:50:17.944,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/605. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:17.944,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",605,active,0} [ns_server:debug,2014-08-19T16:50:17.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_448_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_448_'ns_1@10.242.238.91'">>}]}, {move_state,704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_704_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_704_'ns_1@10.242.238.91'">>}]}, {move_state,449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_449_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_449_'ns_1@10.242.238.91'">>}]}, {move_state,705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_705_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_705_'ns_1@10.242.238.91'">>}]}, {move_state,450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_450_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_450_'ns_1@10.242.238.91'">>}]}, {move_state,706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_706_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_706_'ns_1@10.242.238.91'">>}]}, {move_state,960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_960_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_960_'ns_1@10.242.238.90'">>}]}, {move_state,451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_451_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_451_'ns_1@10.242.238.91'">>}]}, {move_state,707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_707_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_707_'ns_1@10.242.238.91'">>}]}, {move_state,961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_961_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_961_'ns_1@10.242.238.90'">>}]}, {move_state,452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_452_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_452_'ns_1@10.242.238.91'">>}]}, {move_state,708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_708_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_708_'ns_1@10.242.238.91'">>}]}, {move_state,962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_962_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_962_'ns_1@10.242.238.90'">>}]}, {move_state,453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_453_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_453_'ns_1@10.242.238.91'">>}]}, {move_state,709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_709_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_709_'ns_1@10.242.238.91'">>}]}, {move_state,963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_963_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_963_'ns_1@10.242.238.90'">>}]}, {move_state,454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_454_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_454_'ns_1@10.242.238.91'">>}]}, {move_state,710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_710_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_710_'ns_1@10.242.238.91'">>}]}, {move_state,964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_964_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_964_'ns_1@10.242.238.90'">>}]}, {move_state,455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_455_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_455_'ns_1@10.242.238.91'">>}]}, {move_state,711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_711_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_711_'ns_1@10.242.238.91'">>}]}, {move_state,965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_965_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_965_'ns_1@10.242.238.90'">>}]}, {move_state,456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_456_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_456_'ns_1@10.242.238.91'">>}]}, {move_state,712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_712_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_712_'ns_1@10.242.238.91'">>}]}, {move_state,966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_966_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_966_'ns_1@10.242.238.90'">>}]}, {move_state,457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_457_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_457_'ns_1@10.242.238.91'">>}]}, {move_state,713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_713_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_713_'ns_1@10.242.238.91'">>}]}, {move_state,967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_967_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_967_'ns_1@10.242.238.90'">>}]}, {move_state,458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_458_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_458_'ns_1@10.242.238.91'">>}]}, {move_state,714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_714_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_714_'ns_1@10.242.238.91'">>}]}, {move_state,968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_968_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_968_'ns_1@10.242.238.90'">>}]}, {move_state,459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_459_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_459_'ns_1@10.242.238.91'">>}]}, {move_state,715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_715_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_715_'ns_1@10.242.238.91'">>}]}, {move_state,969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_969_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_969_'ns_1@10.242.238.90'">>}]}, {move_state,460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_460_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_460_'ns_1@10.242.238.91'">>}]}, {move_state,716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_716_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_716_'ns_1@10.242.238.91'">>}]}, {move_state,970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_970_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_970_'ns_1@10.242.238.90'">>}]}, {move_state,461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_461_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_461_'ns_1@10.242.238.91'">>}]}, {move_state,717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_717_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_717_'ns_1@10.242.238.91'">>}]}, {move_state,971, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_971_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_971_'ns_1@10.242.238.90'">>}]}, {move_state,462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_462_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_462_'ns_1@10.242.238.91'">>}]}, {move_state,718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_718_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_718_'ns_1@10.242.238.91'">>}]}, {move_state,972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_972_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_972_'ns_1@10.242.238.90'">>}]}, {move_state,463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_463_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_463_'ns_1@10.242.238.91'">>}]}, {move_state,719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_719_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_719_'ns_1@10.242.238.91'">>}]}, {move_state,973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_973_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_973_'ns_1@10.242.238.90'">>}]}, {move_state,464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_464_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_464_'ns_1@10.242.238.91'">>}]}, {move_state,720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_720_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_720_'ns_1@10.242.238.91'">>}]}, {move_state,974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_974_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_974_'ns_1@10.242.238.90'">>}]}, {move_state,465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_465_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_465_'ns_1@10.242.238.91'">>}]}, {move_state,721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_721_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_721_'ns_1@10.242.238.91'">>}]}, {move_state,975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_975_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_975_'ns_1@10.242.238.90'">>}]}, {move_state,466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_466_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_466_'ns_1@10.242.238.91'">>}]}, {move_state,722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_722_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_722_'ns_1@10.242.238.91'">>}]}, {move_state,976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_976_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_976_'ns_1@10.242.238.90'">>}]}, {move_state,467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_467_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_467_'ns_1@10.242.238.91'">>}]}, {move_state,723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_723_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_723_'ns_1@10.242.238.91'">>}]}, {move_state,977, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_977_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_977_'ns_1@10.242.238.90'">>}]}, {move_state,468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_468_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_468_'ns_1@10.242.238.91'">>}]}, {move_state,724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_724_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_724_'ns_1@10.242.238.91'">>}]}, {move_state,978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_978_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_978_'ns_1@10.242.238.90'">>}]}, {move_state,469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_469_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_469_'ns_1@10.242.238.91'">>}]}, {move_state,725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_725_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_725_'ns_1@10.242.238.91'">>}]}, {move_state,979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_979_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_979_'ns_1@10.242.238.90'">>}]}] [ns_server:debug,2014-08-19T16:50:17.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 448, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 704, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 449, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 705, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 450, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 706, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
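Each move_state entry in the docs_left_updater_loop dump above pairs a vbucket with its old chain (['ns_1@10.242.238.88', undefined]), its new chain, and one replica_building_stats tuple per destination node. A rough Python mirror of that shape follows, purely as an editor's illustration; the field names are inferred from the tuples printed above, not taken from Couchbase source.

from dataclasses import dataclass
from typing import List, Optional

# Editor's illustration of the Erlang terms logged by ns_rebalance_observer above.
# Field names are inferred from the logged tuples, not from Couchbase source.

@dataclass
class ReplicaBuildingStats:
    node: str        # e.g. "ns_1@10.242.238.89"
    docs_total: int  # both counters are 0 at the start of the move in this log
    docs_left: int
    tap_name: str    # e.g. "replication_building_448_'ns_1@10.242.238.89'"

@dataclass
class MoveState:
    vbucket: int                       # e.g. 448
    old_chain: List[Optional[str]]     # ['ns_1@10.242.238.88', None]  (undefined -> None)
    new_chain: List[str]               # ['ns_1@10.242.238.89', 'ns_1@10.242.238.91']
    stats: List[ReplicaBuildingStats]  # one entry per replica being built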
[rebalance:info,2014-08-19T16:50:17.960,ns_1@10.242.238.88:<0.4016.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 464) [rebalance:info,2014-08-19T16:50:17.960,ns_1@10.242.238.88:<0.2052.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 461 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:50:17.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 960, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:info,2014-08-19T16:50:17.960,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 461 state to active [rebalance:info,2014-08-19T16:50:17.961,ns_1@10.242.238.88:<0.1802.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:17.962,ns_1@10.242.238.88:<0.2052.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 461 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:17.962,ns_1@10.242.238.88:<0.2052.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:17.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 451, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 707, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 961, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:info,2014-08-19T16:50:17.964,ns_1@10.242.238.88:<0.1810.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_464_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:17.965,ns_1@10.242.238.88:<0.1802.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:17.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 452, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 708, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 962, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 453, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.967,ns_1@10.242.238.88:<0.1802.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 464 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4030.1> [ns_server:debug,2014-08-19T16:50:17.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 709, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 963, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
[ns_server:info,2014-08-19T16:50:17.968,ns_1@10.242.238.88:<0.4030.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 464 to state replica [ns_server:debug,2014-08-19T16:50:17.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 454, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 710, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 964, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 455, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 711, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 965, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 456, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 712, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 966, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 457, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 713, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 967, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 458, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 714, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 968, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 459, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 715, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 969, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:50:17.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 460, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 716, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 970, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 461, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 717, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 971, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 462, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 718, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 972, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 463, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 719, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 973, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 464, [{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 720, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 974, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 465, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 721, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 975, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 466, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:17.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 722, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 976, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 467, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 723, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.992,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 977, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.992,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 468, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.993,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 724, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.994,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 978, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:17.994,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 469, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.994,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 725, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:17.995,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 979, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:18.005,ns_1@10.242.238.88:<0.4030.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_464 [rebalance:info,2014-08-19T16:50:18.006,ns_1@10.242.238.88:<0.4030.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[464]}, {checkpoints,[{464,1}]}, {name,<<"rebalance_464">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[464]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"464"}]} [rebalance:debug,2014-08-19T16:50:18.007,ns_1@10.242.238.88:<0.4030.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4081.1> [rebalance:info,2014-08-19T16:50:18.008,ns_1@10.242.238.88:<0.4030.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.010,ns_1@10.242.238.88:<0.4030.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.010,ns_1@10.242.238.88:<0.1880.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 463 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.010,ns_1@10.242.238.88:<0.4082.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 466) [rebalance:info,2014-08-19T16:50:18.010,ns_1@10.242.238.88:<0.4030.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successful takeover
[ns_server:info,2014-08-19T16:50:18.010,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 463 state to active [rebalance:info,2014-08-19T16:50:18.011,ns_1@10.242.238.88:<0.1624.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:18.011,ns_1@10.242.238.88:<0.1880.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 463 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.012,ns_1@10.242.238.88:<0.1880.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.012,ns_1@10.242.238.88:<0.1802.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 464 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.014,ns_1@10.242.238.88:<0.1810.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.015,ns_1@10.242.238.88:<0.1632.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_466_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:18.015,ns_1@10.242.238.88:<0.1624.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:18.019,ns_1@10.242.238.88:<0.1624.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 466 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4089.1> [ns_server:info,2014-08-19T16:50:18.019,ns_1@10.242.238.88:<0.1810.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_464_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:18.020,ns_1@10.242.238.88:<0.4089.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 466 to state replica [rebalance:info,2014-08-19T16:50:18.020,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 464 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:18.020,ns_1@10.242.238.88:<0.4093.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 464 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:18.036,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.037,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.037,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:18.037,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{464, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.037,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.048,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 464 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:18.048,ns_1@10.242.238.88:<0.3042.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 704 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.048,ns_1@10.242.238.88:<0.4116.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 465) [rebalance:info,2014-08-19T16:50:18.048,ns_1@10.242.238.88:<0.4117.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 467) [rebalance:info,2014-08-19T16:50:18.048,ns_1@10.242.238.88:<0.4118.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 468) [ns_server:debug,2014-08-19T16:50:18.049,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 464) [ns_server:info,2014-08-19T16:50:18.049,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 704 state to active [rebalance:info,2014-08-19T16:50:18.049,ns_1@10.242.238.88:<0.1701.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:18.050,ns_1@10.242.238.88:<0.1547.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:50:18.050,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.050,ns_1@10.242.238.88:<0.1484.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:18.050,ns_1@10.242.238.88:<0.3042.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 704 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.051,ns_1@10.242.238.88:<0.3042.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:18.053,ns_1@10.242.238.88:<0.1709.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_465_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:18.053,ns_1@10.242.238.88:<0.1701.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:18.053,ns_1@10.242.238.88:<0.1555.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_467_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:18.053,ns_1@10.242.238.88:<0.1547.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:18.054,ns_1@10.242.238.88:<0.1492.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_468_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:18.054,ns_1@10.242.238.88:<0.1484.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:18.054,ns_1@10.242.238.88:<0.4089.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_466 [rebalance:info,2014-08-19T16:50:18.055,ns_1@10.242.238.88:<0.4089.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[466]}, {checkpoints,[{466,1}]}, {name,<<"rebalance_466">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[466]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"466"}]} [ns_server:debug,2014-08-19T16:50:18.056,ns_1@10.242.238.88:<0.1701.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 465 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4130.1> [rebalance:debug,2014-08-19T16:50:18.056,ns_1@10.242.238.88:<0.4089.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4132.1> [ns_server:debug,2014-08-19T16:50:18.056,ns_1@10.242.238.88:<0.1547.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 467 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4131.1> [ns_server:debug,2014-08-19T16:50:18.056,ns_1@10.242.238.88:<0.1484.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 468 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4133.1> [ns_server:info,2014-08-19T16:50:18.057,ns_1@10.242.238.88:<0.4130.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 465 to state replica [ns_server:info,2014-08-19T16:50:18.057,ns_1@10.242.238.88:<0.4131.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 467 to state replica [rebalance:info,2014-08-19T16:50:18.057,ns_1@10.242.238.88:<0.4089.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:18.057,ns_1@10.242.238.88:<0.4133.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 468 to state replica 
[rebalance:debug,2014-08-19T16:50:18.059,ns_1@10.242.238.88:<0.4089.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.059,ns_1@10.242.238.88:<0.4089.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.060,ns_1@10.242.238.88:<0.1624.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 466 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.061,ns_1@10.242.238.88:<0.1632.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.066,ns_1@10.242.238.88:<0.1632.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_466_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:18.066,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 466 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:18.066,ns_1@10.242.238.88:<0.4137.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 466 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:18.079,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.079,ns_1@10.242.238.88:<0.2939.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 706 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.079,ns_1@10.242.238.88:<0.4139.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 469) [ns_server:info,2014-08-19T16:50:18.079,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 706 state to active [ns_server:debug,2014-08-19T16:50:18.080,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:18.080,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{466, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:18.080,ns_1@10.242.238.88:<0.1407.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:50:18.080,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.080,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.081,ns_1@10.242.238.88:<0.2939.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 706 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.082,ns_1@10.242.238.88:<0.2939.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:18.083,ns_1@10.242.238.88:<0.1415.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_469_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:18.083,ns_1@10.242.238.88:<0.1407.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:18.086,ns_1@10.242.238.88:<0.1407.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 469 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4153.1> [ns_server:info,2014-08-19T16:50:18.087,ns_1@10.242.238.88:<0.4153.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 469 to state replica [rebalance:info,2014-08-19T16:50:18.087,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 466 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.088,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 466) [ns_server:debug,2014-08-19T16:50:18.089,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:18.094,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 603. Nacking mccouch update. [views:debug,2014-08-19T16:50:18.094,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/603. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.094,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",603,active,0} [ns_server:debug,2014-08-19T16:50:18.096,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,855,672,544, 178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748, 620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879, 696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410, 282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852, 669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928, 800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514, 148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773, 718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380, 1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370, 915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757, 629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160, 888,705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602, 236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989, 861,678,550,184,912,784,729,418,290,963,835,652,524,158,886,703,392,264,1014, 937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782, 727,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986, 858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011, 934,806,751,623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648, 520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698, 1009,932,621,310] [ns_server:debug,2014-08-19T16:50:18.099,ns_1@10.242.238.88:<0.4130.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_465 [rebalance:info,2014-08-19T16:50:18.100,ns_1@10.242.238.88:<0.4130.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[465]}, {checkpoints,[{465,1}]}, {name,<<"rebalance_465">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, 
{vbuckets,[465]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"465"}]} [rebalance:debug,2014-08-19T16:50:18.101,ns_1@10.242.238.88:<0.4130.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4155.1> [rebalance:info,2014-08-19T16:50:18.101,ns_1@10.242.238.88:<0.4130.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.103,ns_1@10.242.238.88:<0.4130.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.103,ns_1@10.242.238.88:<0.4130.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.104,ns_1@10.242.238.88:<0.1701.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 465 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.107,ns_1@10.242.238.88:<0.1709.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.110,ns_1@10.242.238.88:<0.1709.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_465_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:18.110,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 465 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:18.110,ns_1@10.242.238.88:<0.4159.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 465 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:18.111,ns_1@10.242.238.88:<0.4131.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_467 [rebalance:info,2014-08-19T16:50:18.112,ns_1@10.242.238.88:<0.4131.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[467]}, {checkpoints,[{467,1}]}, {name,<<"rebalance_467">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[467]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"467"}]} [rebalance:debug,2014-08-19T16:50:18.113,ns_1@10.242.238.88:<0.4131.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4160.1> [rebalance:info,2014-08-19T16:50:18.114,ns_1@10.242.238.88:<0.4131.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.116,ns_1@10.242.238.88:<0.4131.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.116,ns_1@10.242.238.88:<0.4131.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.117,ns_1@10.242.238.88:<0.1547.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 467 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.118,ns_1@10.242.238.88:<0.1555.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:18.121,ns_1@10.242.238.88:<0.3000.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 705 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.121,ns_1@10.242.238.88:<0.2771.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 708 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:18.121,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 705 state to active 
[ns_server:info,2014-08-19T16:50:18.122,ns_1@10.242.238.88:<0.1555.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_467_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:18.122,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.123,ns_1@10.242.238.88:<0.3000.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 705 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:18.123,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 708 state to active [ns_server:debug,2014-08-19T16:50:18.123,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:18.124,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{465, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.124,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.124,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.126,ns_1@10.242.238.88:<0.2771.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 708 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.127,ns_1@10.242.238.88:<0.3000.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.127,ns_1@10.242.238.88:<0.2771.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.130,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 465 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.130,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 465) [ns_server:debug,2014-08-19T16:50:18.131,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.131,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 467 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:18.131,ns_1@10.242.238.88:<0.4181.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 467 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:18.131,ns_1@10.242.238.88:<0.4153.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_469 [rebalance:info,2014-08-19T16:50:18.132,ns_1@10.242.238.88:<0.4153.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[469]}, {checkpoints,[{469,1}]}, {name,<<"rebalance_469">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[469]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"469"}]} [rebalance:debug,2014-08-19T16:50:18.133,ns_1@10.242.238.88:<0.4153.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4182.1> [rebalance:info,2014-08-19T16:50:18.134,ns_1@10.242.238.88:<0.4153.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.136,ns_1@10.242.238.88:<0.4153.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.136,ns_1@10.242.238.88:<0.4153.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.137,ns_1@10.242.238.88:<0.1407.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 469 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.138,ns_1@10.242.238.88:<0.1415.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.141,ns_1@10.242.238.88:<0.1415.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_469_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:18.143,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.144,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:18.144,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{467, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.145,ns_1@10.242.238.88:<0.4133.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_468 [rebalance:info,2014-08-19T16:50:18.146,ns_1@10.242.238.88:<0.4133.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[468]}, {checkpoints,[{468,1}]}, {name,<<"rebalance_468">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[468]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"468"}]} [rebalance:debug,2014-08-19T16:50:18.147,ns_1@10.242.238.88:<0.4133.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4193.1> [rebalance:info,2014-08-19T16:50:18.148,ns_1@10.242.238.88:<0.4133.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.150,ns_1@10.242.238.88:<0.4133.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.150,ns_1@10.242.238.88:<0.4133.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.151,ns_1@10.242.238.88:<0.1484.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 468 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:18.151,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 467 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.151,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 467) [ns_server:debug,2014-08-19T16:50:18.152,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.152,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 469 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:18.152,ns_1@10.242.238.88:<0.4196.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 469 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:18.153,ns_1@10.242.238.88:<0.1492.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [views:debug,2014-08-19T16:50:18.153,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/603. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.153,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",603,active,0} [ns_server:info,2014-08-19T16:50:18.156,ns_1@10.242.238.88:<0.1492.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_468_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:18.163,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.164,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.164,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:18.164,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.165,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{469, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:18.175,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 469 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.176,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 469) [ns_server:debug,2014-08-19T16:50:18.177,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.177,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 468 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:18.177,ns_1@10.242.238.88:<0.4208.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 468 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:18.193,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.194,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.194,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:18.194,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.195,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{468, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:18.201,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 468 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.201,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 468) [ns_server:debug,2014-08-19T16:50:18.202,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.223,ns_1@10.242.238.88:<0.2616.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 710 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.223,ns_1@10.242.238.88:<0.2862.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 707 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:18.223,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 710 state to active [rebalance:info,2014-08-19T16:50:18.224,ns_1@10.242.238.88:<0.2616.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 710 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:18.224,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 707 state to active [rebalance:info,2014-08-19T16:50:18.225,ns_1@10.242.238.88:<0.2862.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 707 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.226,ns_1@10.242.238.88:<0.2616.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.226,ns_1@10.242.238.88:<0.2862.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:18.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 601. Nacking mccouch update. [views:debug,2014-08-19T16:50:18.328,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/601. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",601,active,0} [ns_server:debug,2014-08-19T16:50:18.331,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,855,672,544, 178,906,778,723,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748, 620,254,126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879, 696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410, 282,955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852, 669,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928, 800,745,617,434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514, 148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773, 718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380, 1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370, 915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757, 629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160, 888,705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602, 236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989, 861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264, 1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651, 468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910, 782,727,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130, 986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206, 1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,414,286,959,831, 648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464, 698,1009,932,621,310] [rebalance:info,2014-08-19T16:50:18.332,ns_1@10.242.238.88:<0.2421.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 712 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.332,ns_1@10.242.238.88:<0.2694.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 709 state change: {'ns_1@10.242.238.88',active,paused,undefined} 
[ns_server:info,2014-08-19T16:50:18.332,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 712 state to active [rebalance:info,2014-08-19T16:50:18.334,ns_1@10.242.238.88:<0.2421.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 712 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:18.335,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 709 state to active [rebalance:info,2014-08-19T16:50:18.336,ns_1@10.242.238.88:<0.2694.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 709 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.336,ns_1@10.242.238.88:<0.2421.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.337,ns_1@10.242.238.88:<0.2694.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:18.412,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/601. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.412,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",601,active,0} [rebalance:info,2014-08-19T16:50:18.415,ns_1@10.242.238.88:<0.2267.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 714 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.415,ns_1@10.242.238.88:<0.2539.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 711 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:18.416,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 714 state to active [rebalance:info,2014-08-19T16:50:18.417,ns_1@10.242.238.88:<0.2267.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 714 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:18.417,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 711 state to active [rebalance:info,2014-08-19T16:50:18.418,ns_1@10.242.238.88:<0.2539.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 711 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.418,ns_1@10.242.238.88:<0.2267.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.419,ns_1@10.242.238.88:<0.2539.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.482,ns_1@10.242.238.88:<0.2344.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 713 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.482,ns_1@10.242.238.88:<0.2108.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 716 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:18.483,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 713 state to active [rebalance:info,2014-08-19T16:50:18.484,ns_1@10.242.238.88:<0.2344.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 713 on ns_1@10.242.238.88 
[ns_server:info,2014-08-19T16:50:18.484,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 716 state to active [rebalance:info,2014-08-19T16:50:18.485,ns_1@10.242.238.88:<0.2108.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 716 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.485,ns_1@10.242.238.88:<0.2344.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.486,ns_1@10.242.238.88:<0.2108.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.566,ns_1@10.242.238.88:<0.2190.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 715 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.566,ns_1@10.242.238.88:<0.4279.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 718) [ns_server:info,2014-08-19T16:50:18.567,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 715 state to active [rebalance:info,2014-08-19T16:50:18.567,ns_1@10.242.238.88:<0.1936.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:18.568,ns_1@10.242.238.88:<0.2190.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 715 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.568,ns_1@10.242.238.88:<0.2190.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:18.571,ns_1@10.242.238.88:<0.1944.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_718_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:18.571,ns_1@10.242.238.88:<0.1936.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:18.573,ns_1@10.242.238.88:<0.1936.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 718 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4286.1> [ns_server:info,2014-08-19T16:50:18.574,ns_1@10.242.238.88:<0.4286.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 718 to state replica [ns_server:debug,2014-08-19T16:50:18.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 599. Nacking mccouch update. [views:debug,2014-08-19T16:50:18.587,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/599. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",599,active,0} [ns_server:debug,2014-08-19T16:50:18.590,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,778, 723,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126, 982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568,202, 1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410,282,955,827, 644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358, 903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617, 434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514,148,876,693, 510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224, 952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666, 538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380,1002,925,797, 742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456, 328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,402,274,947, 819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634,140, 996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732, 604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318, 991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160,888,705,394, 266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964, 836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264,1014,937,809, 754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599, 416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858, 675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934, 806,751,623,440,312,985,857,674,546,180,908,780,725,414,286,959,831,648,520, 154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698, 1009,932,621,310,855,544,178] [rebalance:info,2014-08-19T16:50:18.600,ns_1@10.242.238.88:<0.2017.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 717 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.600,ns_1@10.242.238.88:<0.4287.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 720) 
[ns_server:info,2014-08-19T16:50:18.600,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 717 state to active [rebalance:info,2014-08-19T16:50:18.601,ns_1@10.242.238.88:<0.1760.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:18.601,ns_1@10.242.238.88:<0.2017.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 717 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.602,ns_1@10.242.238.88:<0.2017.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:18.604,ns_1@10.242.238.88:<0.1773.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_720_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:18.604,ns_1@10.242.238.88:<0.1760.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:18.607,ns_1@10.242.238.88:<0.1760.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 720 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4294.1> [ns_server:info,2014-08-19T16:50:18.608,ns_1@10.242.238.88:<0.4294.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 720 to state replica [ns_server:debug,2014-08-19T16:50:18.609,ns_1@10.242.238.88:<0.4286.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_718 [rebalance:info,2014-08-19T16:50:18.613,ns_1@10.242.238.88:<0.4286.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[718]}, {checkpoints,[{718,1}]}, {name,<<"rebalance_718">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[718]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"718"}]} [rebalance:debug,2014-08-19T16:50:18.614,ns_1@10.242.238.88:<0.4286.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4295.1> [rebalance:info,2014-08-19T16:50:18.615,ns_1@10.242.238.88:<0.4286.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.616,ns_1@10.242.238.88:<0.4286.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.616,ns_1@10.242.238.88:<0.4286.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.617,ns_1@10.242.238.88:<0.1936.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 718 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.619,ns_1@10.242.238.88:<0.1944.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.623,ns_1@10.242.238.88:<0.1944.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_718_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:18.623,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 718 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.623,ns_1@10.242.238.88:<0.4299.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 718 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} 
[rebalance:info,2014-08-19T16:50:18.639,ns_1@10.242.238.88:<0.4300.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 722) [rebalance:info,2014-08-19T16:50:18.640,ns_1@10.242.238.88:<0.2918.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 960 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.640,ns_1@10.242.238.88:<0.4301.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 721) [rebalance:info,2014-08-19T16:50:18.640,ns_1@10.242.238.88:<0.4302.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 724) [rebalance:info,2014-08-19T16:50:18.640,ns_1@10.242.238.88:<0.4303.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 723) [ns_server:info,2014-08-19T16:50:18.640,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 960 state to active [rebalance:info,2014-08-19T16:50:18.640,ns_1@10.242.238.88:<0.4304.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 719) [rebalance:info,2014-08-19T16:50:18.641,ns_1@10.242.238.88:<0.1603.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:18.641,ns_1@10.242.238.88:<0.1526.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:18.642,ns_1@10.242.238.88:<0.1858.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:18.642,ns_1@10.242.238.88:<0.1449.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:18.641,ns_1@10.242.238.88:<0.1680.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:18.642,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.642,ns_1@10.242.238.88:<0.2918.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 960 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:18.642,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:18.643,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{718, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.643,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.643,ns_1@10.242.238.88:<0.2918.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:18.643,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.644,ns_1@10.242.238.88:<0.4294.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_720 [rebalance:info,2014-08-19T16:50:18.645,ns_1@10.242.238.88:<0.4294.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[720]}, {checkpoints,[{720,1}]}, {name,<<"rebalance_720">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[720]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"720"}]} [ns_server:info,2014-08-19T16:50:18.646,ns_1@10.242.238.88:<0.1534.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_723_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:18.646,ns_1@10.242.238.88:<0.1526.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:debug,2014-08-19T16:50:18.646,ns_1@10.242.238.88:<0.4294.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4322.1> [ns_server:info,2014-08-19T16:50:18.647,ns_1@10.242.238.88:<0.1866.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_719_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:50:18.647,ns_1@10.242.238.88:<0.1457.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_724_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:18.647,ns_1@10.242.238.88:<0.1858.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:18.647,ns_1@10.242.238.88:<0.1449.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:18.647,ns_1@10.242.238.88:<0.1611.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_722_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:18.647,ns_1@10.242.238.88:<0.1603.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:18.647,ns_1@10.242.238.88:<0.1688.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_721_'ns_1@10.242.238.90'">>] 
[rebalance:info,2014-08-19T16:50:18.648,ns_1@10.242.238.88:<0.4294.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:18.648,ns_1@10.242.238.88:<0.1680.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:18.650,ns_1@10.242.238.88:<0.1526.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 723 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4328.1> [rebalance:debug,2014-08-19T16:50:18.651,ns_1@10.242.238.88:<0.4294.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.651,ns_1@10.242.238.88:<0.4294.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:18.651,ns_1@10.242.238.88:<0.4328.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 723 to state replica [rebalance:info,2014-08-19T16:50:18.652,ns_1@10.242.238.88:<0.1760.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 720 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:18.652,ns_1@10.242.238.88:<0.1449.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 724 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4329.1> [ns_server:debug,2014-08-19T16:50:18.652,ns_1@10.242.238.88:<0.1858.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 719 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4330.1> [ns_server:debug,2014-08-19T16:50:18.652,ns_1@10.242.238.88:<0.1603.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 722 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4331.1> [ns_server:debug,2014-08-19T16:50:18.652,ns_1@10.242.238.88:<0.1680.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 721 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4332.1> [ns_server:info,2014-08-19T16:50:18.653,ns_1@10.242.238.88:<0.4331.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 722 to state replica [ns_server:info,2014-08-19T16:50:18.653,ns_1@10.242.238.88:<0.4329.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 724 to state replica [ns_server:info,2014-08-19T16:50:18.653,ns_1@10.242.238.88:<0.4330.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 719 to state replica [ns_server:info,2014-08-19T16:50:18.653,ns_1@10.242.238.88:<0.4332.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 721 to state replica [rebalance:info,2014-08-19T16:50:18.653,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 718 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:18.654,ns_1@10.242.238.88:<0.1773.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.654,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 718) [ns_server:debug,2014-08-19T16:50:18.655,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:info,2014-08-19T16:50:18.656,ns_1@10.242.238.88:<0.1773.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_720_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:18.657,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 720 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.657,ns_1@10.242.238.88:<0.4337.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 720 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:50:18.671,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/599. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.671,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",599,active,0} [ns_server:debug,2014-08-19T16:50:18.672,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:18.673,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.673,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{720, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.673,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.674,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.683,ns_1@10.242.238.88:<0.4328.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_723 [rebalance:info,2014-08-19T16:50:18.684,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 720 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:18.685,ns_1@10.242.238.88:<0.4328.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[723]}, {checkpoints,[{723,1}]}, {name,<<"rebalance_723">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[723]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"723"}]} [ns_server:debug,2014-08-19T16:50:18.685,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 720) [rebalance:debug,2014-08-19T16:50:18.686,ns_1@10.242.238.88:<0.4328.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4347.1> [rebalance:info,2014-08-19T16:50:18.686,ns_1@10.242.238.88:<0.4328.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:18.686,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:50:18.689,ns_1@10.242.238.88:<0.4328.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.689,ns_1@10.242.238.88:<0.4328.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.690,ns_1@10.242.238.88:<0.1526.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 723 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.692,ns_1@10.242.238.88:<0.1534.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.695,ns_1@10.242.238.88:<0.1534.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_723_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:18.695,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 723 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.695,ns_1@10.242.238.88:<0.4351.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 723 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:18.701,ns_1@10.242.238.88:<0.4330.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_719 [rebalance:info,2014-08-19T16:50:18.704,ns_1@10.242.238.88:<0.4330.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[719]}, {checkpoints,[{719,1}]}, {name,<<"rebalance_719">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[719]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"719"}]} [rebalance:debug,2014-08-19T16:50:18.705,ns_1@10.242.238.88:<0.4330.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4352.1> [rebalance:info,2014-08-19T16:50:18.706,ns_1@10.242.238.88:<0.4330.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.707,ns_1@10.242.238.88:<0.4330.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.708,ns_1@10.242.238.88:<0.4330.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.709,ns_1@10.242.238.88:<0.1858.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 719 state change: {'ns_1@10.242.238.90',active,undefined, undefined} 
[rebalance:info,2014-08-19T16:50:18.710,ns_1@10.242.238.88:<0.2744.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 962 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.710,ns_1@10.242.238.88:<0.4353.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 725) [ns_server:info,2014-08-19T16:50:18.711,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 962 state to active [ns_server:debug,2014-08-19T16:50:18.711,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.712,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:18.712,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{723, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.713,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.713,ns_1@10.242.238.88:<0.2744.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 962 on ns_1@10.242.238.88 [rebalance:debug,2014-08-19T16:50:18.713,ns_1@10.242.238.88:<0.1866.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.713,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.714,ns_1@10.242.238.88:<0.2744.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.714,ns_1@10.242.238.88:<0.1371.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:18.716,ns_1@10.242.238.88:<0.4332.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_721 [ns_server:info,2014-08-19T16:50:18.717,ns_1@10.242.238.88:<0.1866.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_719_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:18.717,ns_1@10.242.238.88:<0.1379.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_725_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:18.717,ns_1@10.242.238.88:<0.1371.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:18.718,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 723 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:18.718,ns_1@10.242.238.88:<0.4332.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[721]}, {checkpoints,[{721,1}]}, {name,<<"rebalance_721">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[721]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"721"}]} [ns_server:debug,2014-08-19T16:50:18.719,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 723) [rebalance:debug,2014-08-19T16:50:18.719,ns_1@10.242.238.88:<0.4332.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4371.1> [ns_server:debug,2014-08-19T16:50:18.719,ns_1@10.242.238.88:<0.1371.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 725 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4372.1> [ns_server:debug,2014-08-19T16:50:18.720,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.720,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 719 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.720,ns_1@10.242.238.88:<0.4332.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:18.720,ns_1@10.242.238.88:<0.4374.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 719 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:18.720,ns_1@10.242.238.88:<0.4372.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 725 to state replica [rebalance:debug,2014-08-19T16:50:18.721,ns_1@10.242.238.88:<0.4332.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.721,ns_1@10.242.238.88:<0.4332.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.722,ns_1@10.242.238.88:<0.1680.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 721 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.724,ns_1@10.242.238.88:<0.1688.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.727,ns_1@10.242.238.88:<0.1688.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_721_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:18.732,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.733,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:18.733,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.733,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{719, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.733,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.737,ns_1@10.242.238.88:<0.4331.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_722 [rebalance:info,2014-08-19T16:50:18.739,ns_1@10.242.238.88:<0.4331.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[722]}, {checkpoints,[{722,1}]}, {name,<<"rebalance_722">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[722]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"722"}]} [rebalance:info,2014-08-19T16:50:18.740,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 719 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:18.740,ns_1@10.242.238.88:<0.4331.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4399.1> [ns_server:debug,2014-08-19T16:50:18.740,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 719) [ns_server:debug,2014-08-19T16:50:18.742,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.742,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 721 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.742,ns_1@10.242.238.88:<0.4402.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 721 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:50:18.743,ns_1@10.242.238.88:<0.4331.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.744,ns_1@10.242.238.88:<0.4331.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.744,ns_1@10.242.238.88:<0.4331.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.745,ns_1@10.242.238.88:<0.1603.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 722 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.747,ns_1@10.242.238.88:<0.1611.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:18.750,ns_1@10.242.238.88:<0.1611.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_722_'ns_1@10.242.238.91'">>] 
[ns_server:debug,2014-08-19T16:50:18.754,ns_1@10.242.238.88:<0.4329.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_724 [ns_server:debug,2014-08-19T16:50:18.755,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.755,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:18.755,ns_1@10.242.238.88:<0.4329.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[724]}, {checkpoints,[{724,1}]}, {name,<<"rebalance_724">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[724]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"724"}]} [ns_server:debug,2014-08-19T16:50:18.756,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{721, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.756,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.756,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:18.756,ns_1@10.242.238.88:<0.4329.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4407.1> [rebalance:info,2014-08-19T16:50:18.758,ns_1@10.242.238.88:<0.4329.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.760,ns_1@10.242.238.88:<0.4329.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.760,ns_1@10.242.238.88:<0.4329.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.761,ns_1@10.242.238.88:<0.2827.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 961 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.761,ns_1@10.242.238.88:<0.2581.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 964 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:18.762,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 964 state to active [rebalance:info,2014-08-19T16:50:18.762,ns_1@10.242.238.88:<0.1449.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 724 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:18.763,ns_1@10.242.238.88:<0.2581.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 964 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:18.763,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 961 state to active 
[rebalance:info,2014-08-19T16:50:18.764,ns_1@10.242.238.88:<0.2827.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 961 on ns_1@10.242.238.88 [rebalance:debug,2014-08-19T16:50:18.764,ns_1@10.242.238.88:<0.1457.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:18.764,ns_1@10.242.238.88:<0.2581.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.764,ns_1@10.242.238.88:<0.2827.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.767,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 721 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.768,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 721) [ns_server:info,2014-08-19T16:50:18.769,ns_1@10.242.238.88:<0.1457.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_724_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:18.769,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.769,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 722 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.769,ns_1@10.242.238.88:<0.4425.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 722 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:18.773,ns_1@10.242.238.88:<0.4372.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_725 [rebalance:info,2014-08-19T16:50:18.774,ns_1@10.242.238.88:<0.4372.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[725]}, {checkpoints,[{725,1}]}, {name,<<"rebalance_725">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[725]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"725"}]} [rebalance:debug,2014-08-19T16:50:18.775,ns_1@10.242.238.88:<0.4372.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4427.1> [rebalance:info,2014-08-19T16:50:18.776,ns_1@10.242.238.88:<0.4372.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:18.778,ns_1@10.242.238.88:<0.4372.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:18.778,ns_1@10.242.238.88:<0.4372.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:18.779,ns_1@10.242.238.88:<0.1371.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 725 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:18.781,ns_1@10.242.238.88:<0.1379.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:18.781,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.782,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:18.782,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:18.782,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.782,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{722, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:18.787,ns_1@10.242.238.88:<0.1379.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_725_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:18.788,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 722 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.788,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 722) [ns_server:debug,2014-08-19T16:50:18.789,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.789,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 724 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.789,ns_1@10.242.238.88:<0.4439.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 724 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:18.792,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 597. Nacking mccouch update. [views:debug,2014-08-19T16:50:18.792,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/597. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.792,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",597,active,0} [ns_server:debug,2014-08-19T16:50:18.795,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,778, 723,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126, 982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568,202, 1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410,282,955,827, 644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358, 903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617, 434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514,148,876,693, 510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224, 952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666, 538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380,1002,925,797, 742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456, 328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770, 715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,402,274,947, 819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634,140, 996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424,296, 969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683,500, 372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942,814, 759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839,656,528, 162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787,732, 604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446,318, 991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160,888,705,394, 266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108,964, 836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264,1014,937,809, 754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599, 416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986,858, 675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934, 806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648, 520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698, 1009,932,621,310,855,544,178] [ns_server:debug,2014-08-19T16:50:18.801,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.802,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:18.802,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.802,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{724, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:18.803,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:18.813,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 724 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.814,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 724) [ns_server:debug,2014-08-19T16:50:18.815,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.815,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 725 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:18.815,ns_1@10.242.238.88:<0.4450.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 725 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:50:18.826,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/597. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.826,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",597,active,0} [ns_server:debug,2014-08-19T16:50:18.829,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.829,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:18.829,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.830,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:18.830,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{725, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:18.837,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 725 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:18.837,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 725) [ns_server:debug,2014-08-19T16:50:18.839,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:18.885,ns_1@10.242.238.88:<0.2400.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 966 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:18.886,ns_1@10.242.238.88:<0.2659.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 963 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:18.886,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 966 state to active [rebalance:info,2014-08-19T16:50:18.887,ns_1@10.242.238.88:<0.2400.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 966 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:18.887,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 963 state to active [rebalance:info,2014-08-19T16:50:18.888,ns_1@10.242.238.88:<0.2659.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 963 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.889,ns_1@10.242.238.88:<0.2400.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.889,ns_1@10.242.238.88:<0.2659.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:18.985,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 595. Nacking mccouch update. [views:debug,2014-08-19T16:50:18.985,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/595. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:18.985,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",595,active,0} [rebalance:info,2014-08-19T16:50:18.986,ns_1@10.242.238.88:<0.2246.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 968 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:18.986,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 968 state to active [rebalance:info,2014-08-19T16:50:18.986,ns_1@10.242.238.88:<0.2504.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 965 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:50:18.987,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,778, 723,595,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254, 126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568, 202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,410,282,955, 827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486, 358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745, 617,434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514,148,876, 693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590, 224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849, 666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380,1002,925, 797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898, 770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246, 118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871, 688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942, 814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839,656, 528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787, 732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446, 318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160,888,705, 394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108, 964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678, 550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264,1014,937, 809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340, 885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727, 599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986, 858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011, 934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831, 
648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464, 698,1009,932,621,310,855,544,178] [rebalance:info,2014-08-19T16:50:18.989,ns_1@10.242.238.88:<0.2246.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 968 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:18.989,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 965 state to active [rebalance:info,2014-08-19T16:50:18.990,ns_1@10.242.238.88:<0.2504.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 965 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:18.990,ns_1@10.242.238.88:<0.2246.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:18.991,ns_1@10.242.238.88:<0.2504.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:19.068,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/595. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.069,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",595,active,0} [rebalance:info,2014-08-19T16:50:19.103,ns_1@10.242.238.88:<0.2073.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 970 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:19.103,ns_1@10.242.238.88:<0.2323.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 967 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:19.103,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 970 state to active [rebalance:info,2014-08-19T16:50:19.105,ns_1@10.242.238.88:<0.2073.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 970 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:19.105,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 967 state to active [rebalance:info,2014-08-19T16:50:19.106,ns_1@10.242.238.88:<0.2323.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 967 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:19.106,ns_1@10.242.238.88:<0.2073.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:19.106,ns_1@10.242.238.88:<0.2323.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:19.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:50:19.202,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 593. Nacking mccouch update. [views:debug,2014-08-19T16:50:19.202,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/593. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.202,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",593,active,0} [ns_server:debug,2014-08-19T16:50:19.204,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,778, 723,595,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254, 126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568, 202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,593,410,282, 955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669, 486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800, 745,617,434,306,979,851,668,540,174,902,774,719,408,280,953,825,642,514,148, 876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718, 590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977, 849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380,1002, 925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767, 639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170, 898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612, 246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839, 656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915, 787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629, 446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160,888, 705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236, 108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861, 678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264,1014, 937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782, 727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130, 986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206, 1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959, 831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830, 464,698,1009,932,621,310,855,544,178] [rebalance:info,2014-08-19T16:50:19.220,ns_1@10.242.238.88:<0.2155.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 969 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:19.220,ns_1@10.242.238.88:<0.4517.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 972) 
[ns_server:info,2014-08-19T16:50:19.221,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 969 state to active [rebalance:info,2014-08-19T16:50:19.221,ns_1@10.242.238.88:<0.1915.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.222,ns_1@10.242.238.88:<0.2155.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 969 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:19.222,ns_1@10.242.238.88:<0.2155.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:19.224,ns_1@10.242.238.88:<0.1923.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_972_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.225,ns_1@10.242.238.88:<0.1915.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:19.226,ns_1@10.242.238.88:<0.1915.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 972 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4524.1> [ns_server:info,2014-08-19T16:50:19.227,ns_1@10.242.238.88:<0.4524.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 972 to state replica [views:debug,2014-08-19T16:50:19.253,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/593. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.254,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",593,active,0} [ns_server:debug,2014-08-19T16:50:19.260,ns_1@10.242.238.88:<0.4524.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_972 [rebalance:info,2014-08-19T16:50:19.261,ns_1@10.242.238.88:<0.4524.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[972]}, {checkpoints,[{972,1}]}, {name,<<"rebalance_972">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[972]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"972"}]} [rebalance:debug,2014-08-19T16:50:19.263,ns_1@10.242.238.88:<0.4524.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4525.1> [rebalance:info,2014-08-19T16:50:19.263,ns_1@10.242.238.88:<0.4524.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.265,ns_1@10.242.238.88:<0.4524.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.265,ns_1@10.242.238.88:<0.4524.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.266,ns_1@10.242.238.88:<0.1915.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 972 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.268,ns_1@10.242.238.88:<0.1923.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:19.270,ns_1@10.242.238.88:<0.1996.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 971 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:19.270,ns_1@10.242.238.88:<0.4526.1>:janitor_agent:wait_index_updated:558]default: Doing 
wait_index_updated call for ns_1@10.242.238.91 (vbucket 974) [ns_server:info,2014-08-19T16:50:19.271,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 971 state to active [rebalance:info,2014-08-19T16:50:19.271,ns_1@10.242.238.88:<0.1730.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:19.272,ns_1@10.242.238.88:<0.1923.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_972_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.272,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 972 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:19.272,ns_1@10.242.238.88:<0.4530.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 972 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:19.272,ns_1@10.242.238.88:<0.1996.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 971 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:19.273,ns_1@10.242.238.88:<0.1996.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:19.274,ns_1@10.242.238.88:<0.1744.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_974_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.274,ns_1@10.242.238.88:<0.1730.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:19.276,ns_1@10.242.238.88:<0.1730.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 974 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4537.1> [ns_server:info,2014-08-19T16:50:19.277,ns_1@10.242.238.88:<0.4537.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 974 to state replica [ns_server:debug,2014-08-19T16:50:19.293,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.294,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.294,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{972, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.294,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.295,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:19.302,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 972 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.303,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 972) [ns_server:debug,2014-08-19T16:50:19.303,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:19.309,ns_1@10.242.238.88:<0.4537.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_974 [rebalance:info,2014-08-19T16:50:19.310,ns_1@10.242.238.88:<0.4537.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[974]}, {checkpoints,[{974,1}]}, {name,<<"rebalance_974">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[974]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"974"}]} [rebalance:debug,2014-08-19T16:50:19.311,ns_1@10.242.238.88:<0.4537.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4561.1> [rebalance:info,2014-08-19T16:50:19.312,ns_1@10.242.238.88:<0.4537.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.314,ns_1@10.242.238.88:<0.4537.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.314,ns_1@10.242.238.88:<0.4537.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.315,ns_1@10.242.238.88:<0.1730.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 974 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.317,ns_1@10.242.238.88:<0.1744.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.320,ns_1@10.242.238.88:<0.1744.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_974_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.320,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 974 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:19.320,ns_1@10.242.238.88:<0.4565.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 974 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} 
[rebalance:info,2014-08-19T16:50:19.321,ns_1@10.242.238.88:<0.4566.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 973) [rebalance:info,2014-08-19T16:50:19.321,ns_1@10.242.238.88:<0.4567.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 976) [rebalance:info,2014-08-19T16:50:19.322,ns_1@10.242.238.88:<0.1823.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.322,ns_1@10.242.238.88:<0.1582.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:19.325,ns_1@10.242.238.88:<0.1831.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_973_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.325,ns_1@10.242.238.88:<0.1823.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.326,ns_1@10.242.238.88:<0.1590.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_976_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.326,ns_1@10.242.238.88:<0.1582.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:19.328,ns_1@10.242.238.88:<0.1823.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 973 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4572.1> [ns_server:debug,2014-08-19T16:50:19.328,ns_1@10.242.238.88:<0.1582.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 976 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4573.1> [ns_server:info,2014-08-19T16:50:19.329,ns_1@10.242.238.88:<0.4573.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 976 to state replica [ns_server:info,2014-08-19T16:50:19.329,ns_1@10.242.238.88:<0.4572.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 973 to state replica [ns_server:debug,2014-08-19T16:50:19.335,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.336,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.336,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.336,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.336,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{974, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:19.343,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 974 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.343,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 974) [ns_server:debug,2014-08-19T16:50:19.344,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:19.344,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 591. Nacking mccouch update. [views:debug,2014-08-19T16:50:19.345,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/591. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.345,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",591,active,0} [ns_server:debug,2014-08-19T16:50:19.347,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,778, 723,595,412,284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254, 126,982,854,671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568, 202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721,593,410,282, 955,827,644,516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669, 486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800, 745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514, 148,876,693,510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773, 718,590,224,952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,406,278,951,823,640,512,146,874,691,508,380, 1002,925,797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536, 170,898,770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740, 612,246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 
607,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370, 915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757, 629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160, 888,705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602, 236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989, 861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264, 1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651, 468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910, 782,727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624, 130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572, 206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286, 959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230, 830,464,698,1009,932,621,310,855,544,178] [ns_server:debug,2014-08-19T16:50:19.363,ns_1@10.242.238.88:<0.4573.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_976 [rebalance:info,2014-08-19T16:50:19.365,ns_1@10.242.238.88:<0.4573.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[976]}, {checkpoints,[{976,1}]}, {name,<<"rebalance_976">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[976]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"976"}]} [rebalance:debug,2014-08-19T16:50:19.367,ns_1@10.242.238.88:<0.4573.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4583.1> [rebalance:info,2014-08-19T16:50:19.368,ns_1@10.242.238.88:<0.4573.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.369,ns_1@10.242.238.88:<0.4573.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.369,ns_1@10.242.238.88:<0.4573.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.370,ns_1@10.242.238.88:<0.1582.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 976 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.372,ns_1@10.242.238.88:<0.1590.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.375,ns_1@10.242.238.88:<0.1590.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_976_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.376,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 976 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:19.376,ns_1@10.242.238.88:<0.4587.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 976 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:19.380,ns_1@10.242.238.88:<0.4572.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_973 [rebalance:info,2014-08-19T16:50:19.381,ns_1@10.242.238.88:<0.4572.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[973]}, 
{checkpoints,[{973,1}]}, {name,<<"rebalance_973">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[973]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"973"}]} [rebalance:debug,2014-08-19T16:50:19.382,ns_1@10.242.238.88:<0.4572.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4588.1> [rebalance:info,2014-08-19T16:50:19.383,ns_1@10.242.238.88:<0.4572.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.384,ns_1@10.242.238.88:<0.4572.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.385,ns_1@10.242.238.88:<0.4572.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.386,ns_1@10.242.238.88:<0.1823.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 973 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.388,ns_1@10.242.238.88:<0.1831.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.392,ns_1@10.242.238.88:<0.1831.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_973_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:19.397,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.398,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.398,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:19.398,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.398,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{976, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:19.409,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 976 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.410,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 976) [ns_server:debug,2014-08-19T16:50:19.411,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.411,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 973 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:19.411,ns_1@10.242.238.88:<0.4601.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 973 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:50:19.412,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/591. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.412,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",591,active,0} [ns_server:debug,2014-08-19T16:50:19.426,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.426,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:19.427,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.427,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{973, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.427,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:19.434,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 973 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.435,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 973) [ns_server:debug,2014-08-19T16:50:19.436,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.463,ns_1@10.242.238.88:<0.4611.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 975) [rebalance:info,2014-08-19T16:50:19.463,ns_1@10.242.238.88:<0.4612.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 978) [rebalance:info,2014-08-19T16:50:19.464,ns_1@10.242.238.88:<0.1653.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.464,ns_1@10.242.238.88:<0.1428.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:19.468,ns_1@10.242.238.88:<0.1667.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_975_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.468,ns_1@10.242.238.88:<0.1653.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.468,ns_1@10.242.238.88:<0.1436.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_978_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.468,ns_1@10.242.238.88:<0.1428.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:19.471,ns_1@10.242.238.88:<0.1653.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 975 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4631.1> [ns_server:debug,2014-08-19T16:50:19.471,ns_1@10.242.238.88:<0.1428.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 978 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4632.1> [ns_server:info,2014-08-19T16:50:19.471,ns_1@10.242.238.88:<0.4631.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 975 to state replica [ns_server:info,2014-08-19T16:50:19.471,ns_1@10.242.238.88:<0.4632.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 978 to state replica [ns_server:debug,2014-08-19T16:50:19.504,ns_1@10.242.238.88:<0.4631.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_975 [rebalance:info,2014-08-19T16:50:19.506,ns_1@10.242.238.88:<0.4631.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[975]}, {checkpoints,[{975,1}]}, {name,<<"rebalance_975">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[975]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"975"}]} [rebalance:debug,2014-08-19T16:50:19.508,ns_1@10.242.238.88:<0.4631.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4633.1> [rebalance:info,2014-08-19T16:50:19.509,ns_1@10.242.238.88:<0.4631.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.511,ns_1@10.242.238.88:<0.4631.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.511,ns_1@10.242.238.88:<0.4631.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.512,ns_1@10.242.238.88:<0.1653.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 975 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.513,ns_1@10.242.238.88:<0.1667.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.516,ns_1@10.242.238.88:<0.1667.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_975_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.517,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 975 state change 
[{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:19.517,ns_1@10.242.238.88:<0.4637.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 975 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:19.520,ns_1@10.242.238.88:<0.4632.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_978 [rebalance:info,2014-08-19T16:50:19.522,ns_1@10.242.238.88:<0.4632.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[978]}, {checkpoints,[{978,1}]}, {name,<<"rebalance_978">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[978]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"978"}]} [rebalance:debug,2014-08-19T16:50:19.523,ns_1@10.242.238.88:<0.4632.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4638.1> [rebalance:info,2014-08-19T16:50:19.524,ns_1@10.242.238.88:<0.4632.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.526,ns_1@10.242.238.88:<0.4632.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.526,ns_1@10.242.238.88:<0.4632.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.527,ns_1@10.242.238.88:<0.1428.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 978 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.529,ns_1@10.242.238.88:<0.1436.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.532,ns_1@10.242.238.88:<0.1436.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_978_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:19.537,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 589. Nacking mccouch update. [views:debug,2014-08-19T16:50:19.537,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/589. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.537,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.537,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",589,active,0} [ns_server:debug,2014-08-19T16:50:19.538,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{975, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.539,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.540,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930, 802,747,619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644, 516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903, 775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434, 306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224, 952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666, 538,172,900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925, 797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898, 770,715,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246, 118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871, 688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424, 296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942, 814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839,656, 528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915,787, 732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629,446, 318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160,888,705, 394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236,108, 964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678, 550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264,1014,937, 809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468,340, 885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727, 599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130,986, 858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011, 934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831, 648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830,464, 698,1009,932,621,310,855,544,178,778,723,412] [ns_server:debug,2014-08-19T16:50:19.542,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.542,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:19.549,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 975 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.549,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 975) [ns_server:debug,2014-08-19T16:50:19.550,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.550,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 978 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:19.550,ns_1@10.242.238.88:<0.4651.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 978 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:19.566,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.566,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{978, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.568,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.568,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:19.568,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:19.577,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 978 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.577,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 978) [ns_server:debug,2014-08-19T16:50:19.578,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:50:19.605,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/589. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.605,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",589,active,0} [rebalance:info,2014-08-19T16:50:19.684,ns_1@10.242.238.88:<0.4675.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 977) [rebalance:info,2014-08-19T16:50:19.684,ns_1@10.242.238.88:<0.4676.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 979) [rebalance:info,2014-08-19T16:50:19.685,ns_1@10.242.238.88:<0.4677.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 463) [rebalance:info,2014-08-19T16:50:19.685,ns_1@10.242.238.88:<0.4678.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 462) [rebalance:info,2014-08-19T16:50:19.685,ns_1@10.242.238.88:<0.4679.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 448) [rebalance:info,2014-08-19T16:50:19.685,ns_1@10.242.238.88:<0.4680.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 450) [rebalance:info,2014-08-19T16:50:19.686,ns_1@10.242.238.88:<0.4681.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 452) [rebalance:info,2014-08-19T16:50:19.686,ns_1@10.242.238.88:<0.4682.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 449) [rebalance:info,2014-08-19T16:50:19.686,ns_1@10.242.238.88:<0.4683.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 456) [rebalance:info,2014-08-19T16:50:19.686,ns_1@10.242.238.88:<0.1505.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.686,ns_1@10.242.238.88:<0.1880.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4684.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 454) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4685.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 458) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4686.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 459) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.1957.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4687.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 453) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.3063.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4688.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 451) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4689.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 460) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.2965.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4690.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 457) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4691.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 455) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.4692.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 712) [rebalance:info,2014-08-19T16:50:19.687,ns_1@10.242.238.88:<0.2806.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4693.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 706) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4694.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 707) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4696.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 461) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4695.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 708) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4697.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 710) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4698.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 704) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.2483.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4699.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 709) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.3021.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4700.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 705) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4701.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 715) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4702.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 713) [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.2637.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.688,ns_1@10.242.238.88:<0.4703.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 714) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4704.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 716) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4705.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 960) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4706.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 711) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4707.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 961) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2288.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2421.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2219.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4708.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 962) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4709.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 717) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2939.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2862.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2715.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4710.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 968) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4712.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 965) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4713.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 963) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4714.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 964) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2771.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2883.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4715.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 970) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4716.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 966) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4717.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 969) [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.2616.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.689,ns_1@10.242.238.88:<0.4718.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 967) [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.4719.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 971) [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.3042.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.1350.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.1513.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_977_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.2694.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.2918.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.3000.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.2827.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.2744.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.1505.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:19.690,ns_1@10.242.238.88:<0.2190.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2344.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2246.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2504.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2134.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2560.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2659.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2365.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2052.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2267.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2581.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.1965.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_462_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.691,ns_1@10.242.238.88:<0.2108.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.692,ns_1@10.242.238.88:<0.2073.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.692,ns_1@10.242.238.88:<0.2539.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:19.692,ns_1@10.242.238.88:<0.1957.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:19.692,ns_1@10.242.238.88:<0.2017.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:info,2014-08-19T16:50:19.692,ns_1@10.242.238.88:<0.1888.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_463_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.692,ns_1@10.242.238.88:<0.2400.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.692,ns_1@10.242.238.88:<0.1880.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:19.693,ns_1@10.242.238.88:<0.2155.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.693,ns_1@10.242.238.88:<0.2323.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:19.693,ns_1@10.242.238.88:<0.1996.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:19.693,ns_1@10.242.238.88:<0.3071.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_448_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.693,ns_1@10.242.238.88:<0.3063.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.693,ns_1@10.242.238.88:<0.2973.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_450_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.694,ns_1@10.242.238.88:<0.2965.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.694,ns_1@10.242.238.88:<0.2814.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_452_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.695,ns_1@10.242.238.88:<0.2806.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.696,ns_1@10.242.238.88:<0.2491.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_456_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.696,ns_1@10.242.238.88:<0.2483.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.698,ns_1@10.242.238.88:<0.3029.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_449_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.698,ns_1@10.242.238.88:<0.3021.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.700,ns_1@10.242.238.88:<0.2296.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': 
[<<"replication_building_458_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.700,ns_1@10.242.238.88:<0.2288.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.701,ns_1@10.242.238.88:<0.2645.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_454_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.701,ns_1@10.242.238.88:<0.2637.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.703,ns_1@10.242.238.88:<0.2232.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_459_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.703,ns_1@10.242.238.88:<0.2219.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.704,ns_1@10.242.238.88:<0.2723.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_453_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.704,ns_1@10.242.238.88:<0.2715.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.705,ns_1@10.242.238.88:<0.2896.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_451_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.705,ns_1@10.242.238.88:<0.2883.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.706,ns_1@10.242.238.88:<0.2870.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_707_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.706,ns_1@10.242.238.88:<0.2862.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.707,ns_1@10.242.238.88:<0.2429.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_712_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.707,ns_1@10.242.238.88:<0.2421.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.707,ns_1@10.242.238.88:<0.2779.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_708_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.707,ns_1@10.242.238.88:<0.2771.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.707,ns_1@10.242.238.88:<0.2947.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_706_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.707,ns_1@10.242.238.88:<0.2939.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.708,ns_1@10.242.238.88:<0.3050.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_704_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.708,ns_1@10.242.238.88:<0.3042.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:50:19.708,ns_1@10.242.238.88:<0.2702.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_709_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.708,ns_1@10.242.238.88:<0.2694.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.708,ns_1@10.242.238.88:<0.1358.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_979_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:19.709,ns_1@10.242.238.88:<0.2624.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_710_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.709,ns_1@10.242.238.88:<0.1350.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:19.709,ns_1@10.242.238.88:<0.2616.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.709,ns_1@10.242.238.88:<0.2926.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_960_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.709,ns_1@10.242.238.88:<0.2918.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.709,ns_1@10.242.238.88:<0.3008.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_705_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.3000.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2835.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_961_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2827.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2198.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_715_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2352.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_713_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2190.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2344.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2373.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_457_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.710,ns_1@10.242.238.88:<0.2365.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.711,ns_1@10.242.238.88:<0.2758.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_962_'ns_1@10.242.238.91'">>] 
[rebalance:info,2014-08-19T16:50:19.711,ns_1@10.242.238.88:<0.2744.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.711,ns_1@10.242.238.88:<0.2142.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_460_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.711,ns_1@10.242.238.88:<0.2134.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.711,ns_1@10.242.238.88:<0.2568.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_455_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.711,ns_1@10.242.238.88:<0.2560.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.712,ns_1@10.242.238.88:<0.2060.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_461_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:19.712,ns_1@10.242.238.88:<0.2052.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.712,ns_1@10.242.238.88:<0.2254.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_968_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:19.712,ns_1@10.242.238.88:<0.2512.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_965_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.712,ns_1@10.242.238.88:<0.2246.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:19.712,ns_1@10.242.238.88:<0.2504.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.712,ns_1@10.242.238.88:<0.2275.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_714_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.713,ns_1@10.242.238.88:<0.2267.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.713,ns_1@10.242.238.88:<0.2589.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_964_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.713,ns_1@10.242.238.88:<0.2581.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.713,ns_1@10.242.238.88:<0.2667.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_963_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.713,ns_1@10.242.238.88:<0.2659.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.713,ns_1@10.242.238.88:<0.2116.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_716_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.713,ns_1@10.242.238.88:<0.2108.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:50:19.714,ns_1@10.242.238.88:<0.2547.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_711_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.714,ns_1@10.242.238.88:<0.2539.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.714,ns_1@10.242.238.88:<0.2081.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_970_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.714,ns_1@10.242.238.88:<0.2073.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.714,ns_1@10.242.238.88:<0.2025.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_717_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.715,ns_1@10.242.238.88:<0.2017.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.715,ns_1@10.242.238.88:<0.2408.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_966_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.715,ns_1@10.242.238.88:<0.2400.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.715,ns_1@10.242.238.88:<0.2163.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_969_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.715,ns_1@10.242.238.88:<0.2155.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.715,ns_1@10.242.238.88:<0.2331.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_967_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.716,ns_1@10.242.238.88:<0.2323.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:19.716,ns_1@10.242.238.88:<0.2004.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_971_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.716,ns_1@10.242.238.88:<0.1996.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:19.718,ns_1@10.242.238.88:<0.1505.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 977 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4752.1> [ns_server:info,2014-08-19T16:50:19.719,ns_1@10.242.238.88:<0.4752.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 977 to state replica [ns_server:debug,2014-08-19T16:50:19.724,ns_1@10.242.238.88:<0.1957.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 462 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4790.1> [ns_server:info,2014-08-19T16:50:19.724,ns_1@10.242.238.88:<0.4790.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 462 to state replica [ns_server:debug,2014-08-19T16:50:19.729,ns_1@10.242.238.88:<0.1880.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 463 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4809.1> 
[ns_server:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 587. Nacking mccouch update. [views:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/587. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.88:<0.3063.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 448 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4810.1> [ns_server:info,2014-08-19T16:50:19.730,ns_1@10.242.238.88:<0.4809.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 463 to state replica [ns_server:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.88:<0.2806.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 452 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4812.1> [ns_server:debug,2014-08-19T16:50:19.730,ns_1@10.242.238.88:<0.2965.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 450 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4811.1> [ns_server:debug,2014-08-19T16:50:19.731,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",587,active,0} [ns_server:info,2014-08-19T16:50:19.731,ns_1@10.242.238.88:<0.4812.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 452 to state replica [ns_server:info,2014-08-19T16:50:19.731,ns_1@10.242.238.88:<0.4811.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 450 to state replica [ns_server:info,2014-08-19T16:50:19.731,ns_1@10.242.238.88:<0.4810.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 448 to state replica [ns_server:debug,2014-08-19T16:50:19.732,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930, 802,747,619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644, 516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903, 775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434, 306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224, 952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666, 538,172,900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925, 797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898, 770,715,587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612, 246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 
424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839, 656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370,915, 787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629, 446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160,888, 705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602,236, 108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861, 678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264,1014, 937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782, 727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624,130, 986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572,206, 1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959, 831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230,830, 464,698,1009,932,621,310,855,544,178,778,723,412] [ns_server:debug,2014-08-19T16:50:19.737,ns_1@10.242.238.88:<0.2483.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 456 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4813.1> [ns_server:info,2014-08-19T16:50:19.738,ns_1@10.242.238.88:<0.4813.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 456 to state replica [ns_server:debug,2014-08-19T16:50:19.738,ns_1@10.242.238.88:<0.3021.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 449 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4814.1> [ns_server:debug,2014-08-19T16:50:19.740,ns_1@10.242.238.88:<0.2288.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 458 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4815.1> [ns_server:info,2014-08-19T16:50:19.740,ns_1@10.242.238.88:<0.4814.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 449 to state replica [ns_server:info,2014-08-19T16:50:19.742,ns_1@10.242.238.88:<0.4815.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 458 to state replica [ns_server:debug,2014-08-19T16:50:19.749,ns_1@10.242.238.88:<0.2637.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 454 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4816.1> [ns_server:debug,2014-08-19T16:50:19.749,ns_1@10.242.238.88:<0.2219.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 459 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4817.1> [ns_server:debug,2014-08-19T16:50:19.752,ns_1@10.242.238.88:<0.2862.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 707 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4819.1> [ns_server:debug,2014-08-19T16:50:19.755,ns_1@10.242.238.88:<0.2883.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 451 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4820.1> [ns_server:debug,2014-08-19T16:50:19.755,ns_1@10.242.238.88:<0.2715.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 453 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4818.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2771.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 708 
'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4822.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.3042.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 704 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4823.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2421.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 712 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4821.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2694.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 709 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4825.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.1350.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 979 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4824.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2939.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 706 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4826.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2616.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 710 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4827.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.3000.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 705 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4829.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2918.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 960 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4828.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2190.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 715 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4830.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2344.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 713 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4832.1> [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2827.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 961 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4831.1> [ns_server:info,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.4817.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 459 to state replica [ns_server:info,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.4816.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 454 to state replica [ns_server:debug,2014-08-19T16:50:19.757,ns_1@10.242.238.88:<0.2052.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 461 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4833.1> [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4819.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 707 to state replica [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4821.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 712 to state replica [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4822.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 708 to state replica [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4820.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 451 to state 
replica [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2365.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 457 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4834.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2246.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 968 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4836.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2560.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 455 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4835.1> [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4823.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 704 to state replica [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4826.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 706 to state replica [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4818.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 453 to state replica [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2134.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 460 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.4838.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2744.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 962 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4837.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2581.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 964 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4840.1> [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4825.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 709 to state replica [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4824.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 979 to state replica [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2504.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 965 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4839.1> [ns_server:info,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.4827.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 710 to state replica [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2267.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 714 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4841.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2108.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 716 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4842.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2073.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 970 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4843.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2659.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 963 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4844.1> [ns_server:debug,2014-08-19T16:50:19.758,ns_1@10.242.238.88:<0.2400.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 966 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4846.1> 
[ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4828.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 960 to state replica [ns_server:debug,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.2017.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 717 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4845.1> [ns_server:debug,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.2539.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 711 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.4847.1> [ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4830.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 715 to state replica [ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4829.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 705 to state replica [ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4831.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 961 to state replica [ns_server:debug,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.2323.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 967 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4848.1> [ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4832.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 713 to state replica [ns_server:debug,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.1996.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 971 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4849.1> [ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4833.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 461 to state replica [ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4834.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 457 to state replica [ns_server:debug,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.2155.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 969 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.4850.1> [ns_server:info,2014-08-19T16:50:19.759,ns_1@10.242.238.88:<0.4835.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 455 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4836.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 968 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4838.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 460 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4840.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 964 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4841.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 714 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4843.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 970 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4839.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 965 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4842.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 716 to state replica 
[ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4837.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 962 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4847.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 711 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4845.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 717 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4844.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 963 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4846.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 966 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4848.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 967 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4849.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 971 to state replica [ns_server:info,2014-08-19T16:50:19.760,ns_1@10.242.238.88:<0.4850.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 969 to state replica [ns_server:debug,2014-08-19T16:50:19.766,ns_1@10.242.238.88:<0.4752.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_977 [rebalance:info,2014-08-19T16:50:19.767,ns_1@10.242.238.88:<0.4752.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[977]}, {checkpoints,[{977,1}]}, {name,<<"rebalance_977">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[977]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"977"}]} [rebalance:debug,2014-08-19T16:50:19.767,ns_1@10.242.238.88:<0.4752.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4851.1> [rebalance:info,2014-08-19T16:50:19.773,ns_1@10.242.238.88:<0.4752.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:19.774,ns_1@10.242.238.88:<0.4790.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_462 [rebalance:debug,2014-08-19T16:50:19.775,ns_1@10.242.238.88:<0.4752.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.775,ns_1@10.242.238.88:<0.4752.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.776,ns_1@10.242.238.88:<0.1505.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 977 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:19.777,ns_1@10.242.238.88:<0.4790.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[462]}, {checkpoints,[{462,1}]}, {name,<<"rebalance_462">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[462]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"462"}]} [rebalance:debug,2014-08-19T16:50:19.777,ns_1@10.242.238.88:<0.4790.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4852.1> [rebalance:debug,2014-08-19T16:50:19.778,ns_1@10.242.238.88:<0.1513.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:19.778,ns_1@10.242.238.88:<0.4790.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:50:19.780,ns_1@10.242.238.88:<0.4790.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.780,ns_1@10.242.238.88:<0.4790.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.781,ns_1@10.242.238.88:<0.1957.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 462 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:19.782,ns_1@10.242.238.88:<0.1513.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_977_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:19.782,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 977 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:19.782,ns_1@10.242.238.88:<0.4856.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 977 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:19.783,ns_1@10.242.238.88:<0.1965.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.785,ns_1@10.242.238.88:<0.1965.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_462_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.790,ns_1@10.242.238.88:<0.4812.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_452 [rebalance:info,2014-08-19T16:50:19.792,ns_1@10.242.238.88:<0.4812.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[452]}, {checkpoints,[{452,1}]}, {name,<<"rebalance_452">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[452]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"452"}]} [rebalance:debug,2014-08-19T16:50:19.793,ns_1@10.242.238.88:<0.4812.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4859.1> [rebalance:info,2014-08-19T16:50:19.793,ns_1@10.242.238.88:<0.4812.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.795,ns_1@10.242.238.88:<0.4812.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.795,ns_1@10.242.238.88:<0.4812.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.797,ns_1@10.242.238.88:<0.2806.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 452 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.798,ns_1@10.242.238.88:<0.2814.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.801,ns_1@10.242.238.88:<0.2814.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_452_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.802,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.803,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{977, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.803,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:19.806,ns_1@10.242.238.88:<0.4810.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_448 [ns_server:debug,2014-08-19T16:50:19.806,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.807,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:19.809,ns_1@10.242.238.88:<0.4810.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[448]}, {checkpoints,[{448,1}]}, {name,<<"rebalance_448">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[448]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"448"}]} [rebalance:debug,2014-08-19T16:50:19.810,ns_1@10.242.238.88:<0.4810.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4870.1> [rebalance:info,2014-08-19T16:50:19.811,ns_1@10.242.238.88:<0.4810.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.812,ns_1@10.242.238.88:<0.4810.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.813,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 977 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:19.813,ns_1@10.242.238.88:<0.4810.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:19.813,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 977) [rebalance:info,2014-08-19T16:50:19.813,ns_1@10.242.238.88:<0.3063.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 448 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [views:debug,2014-08-19T16:50:19.814,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/587. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.814,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:19.814,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",587,active,0} [rebalance:info,2014-08-19T16:50:19.814,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 462 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:19.814,ns_1@10.242.238.88:<0.4873.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 462 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:19.815,ns_1@10.242.238.88:<0.3071.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.818,ns_1@10.242.238.88:<0.3071.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_448_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.822,ns_1@10.242.238.88:<0.4809.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_463 [rebalance:info,2014-08-19T16:50:19.824,ns_1@10.242.238.88:<0.4809.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[463]}, {checkpoints,[{463,1}]}, {name,<<"rebalance_463">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[463]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"463"}]} [rebalance:debug,2014-08-19T16:50:19.825,ns_1@10.242.238.88:<0.4809.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4876.1> [rebalance:info,2014-08-19T16:50:19.826,ns_1@10.242.238.88:<0.4809.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.828,ns_1@10.242.238.88:<0.4809.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.828,ns_1@10.242.238.88:<0.4809.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.829,ns_1@10.242.238.88:<0.1880.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 463 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:19.830,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:19.830,ns_1@10.242.238.88:<0.1888.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.831,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.831,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.831,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.831,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{462, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:19.835,ns_1@10.242.238.88:<0.1888.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_463_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.837,ns_1@10.242.238.88:<0.4811.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_450 [rebalance:info,2014-08-19T16:50:19.838,ns_1@10.242.238.88:<0.4811.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[450]}, {checkpoints,[{450,1}]}, {name,<<"rebalance_450">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[450]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"450"}]} [rebalance:debug,2014-08-19T16:50:19.839,ns_1@10.242.238.88:<0.4811.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4887.1> [rebalance:info,2014-08-19T16:50:19.839,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 462 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.839,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 462) [rebalance:info,2014-08-19T16:50:19.840,ns_1@10.242.238.88:<0.4811.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:19.840,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.840,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 452 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:19.840,ns_1@10.242.238.88:<0.4890.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 452 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:19.841,ns_1@10.242.238.88:<0.4811.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.841,ns_1@10.242.238.88:<0.4811.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.842,ns_1@10.242.238.88:<0.2965.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 450 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.846,ns_1@10.242.238.88:<0.2973.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.849,ns_1@10.242.238.88:<0.2973.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_450_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.851,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.851,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.851,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.852,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.852,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{452, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.858,ns_1@10.242.238.88:<0.4822.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_708 [rebalance:info,2014-08-19T16:50:19.862,ns_1@10.242.238.88:<0.4822.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[708]}, {checkpoints,[{708,1}]}, {name,<<"rebalance_708">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[708]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"708"}]} [rebalance:info,2014-08-19T16:50:19.863,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 452 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:19.864,ns_1@10.242.238.88:<0.4822.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4902.1> [ns_server:debug,2014-08-19T16:50:19.864,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 452) [rebalance:info,2014-08-19T16:50:19.864,ns_1@10.242.238.88:<0.4822.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:19.865,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.865,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 448 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:19.865,ns_1@10.242.238.88:<0.4904.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 448 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:19.866,ns_1@10.242.238.88:<0.4822.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.866,ns_1@10.242.238.88:<0.4822.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.867,ns_1@10.242.238.88:<0.2771.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 708 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.869,ns_1@10.242.238.88:<0.2779.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.874,ns_1@10.242.238.88:<0.4833.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_461 [rebalance:info,2014-08-19T16:50:19.878,ns_1@10.242.238.88:<0.4833.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[461]}, 
{checkpoints,[{461,1}]}, {name,<<"rebalance_461">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[461]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"461"}]} [rebalance:debug,2014-08-19T16:50:19.878,ns_1@10.242.238.88:<0.4833.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4913.1> [ns_server:info,2014-08-19T16:50:19.879,ns_1@10.242.238.88:<0.2779.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_708_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.879,ns_1@10.242.238.88:<0.4833.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.881,ns_1@10.242.238.88:<0.4833.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.881,ns_1@10.242.238.88:<0.4833.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.882,ns_1@10.242.238.88:<0.2052.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 461 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:19.883,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:19.884,ns_1@10.242.238.88:<0.2060.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.884,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.884,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:19.884,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.885,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{448, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:19.889,ns_1@10.242.238.88:<0.2060.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_461_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.891,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 448 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.892,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 448) [ns_server:debug,2014-08-19T16:50:19.893,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.893,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 463 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:19.893,ns_1@10.242.238.88:<0.4933.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 463 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:19.894,ns_1@10.242.238.88:<0.4832.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_713 [rebalance:info,2014-08-19T16:50:19.895,ns_1@10.242.238.88:<0.4832.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[713]}, {checkpoints,[{713,1}]}, {name,<<"rebalance_713">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[713]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"713"}]} [rebalance:debug,2014-08-19T16:50:19.896,ns_1@10.242.238.88:<0.4832.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4934.1> [rebalance:info,2014-08-19T16:50:19.897,ns_1@10.242.238.88:<0.4832.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.899,ns_1@10.242.238.88:<0.4832.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.899,ns_1@10.242.238.88:<0.4832.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.900,ns_1@10.242.238.88:<0.2344.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 713 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.902,ns_1@10.242.238.88:<0.2352.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.904,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.904,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.904,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.905,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.905,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{463, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:19.905,ns_1@10.242.238.88:<0.2352.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_713_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.914,ns_1@10.242.238.88:<0.4835.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_455 [ns_server:debug,2014-08-19T16:50:19.914,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 585. Nacking mccouch update. [views:debug,2014-08-19T16:50:19.914,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/585. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.914,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",585,active,0} [rebalance:info,2014-08-19T16:50:19.915,ns_1@10.242.238.88:<0.4835.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[455]}, {checkpoints,[{455,1}]}, {name,<<"rebalance_455">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[455]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"455"}]} [rebalance:debug,2014-08-19T16:50:19.916,ns_1@10.242.238.88:<0.4835.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4945.1> [ns_server:debug,2014-08-19T16:50:19.916,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930, 802,747,619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644, 516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903, 775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434, 306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224, 952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666, 538,172,900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925, 797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898, 
770,715,587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612, 246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498,370, 915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757, 629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160, 888,705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602, 236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989, 861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392,264, 1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834,651, 468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910, 782,727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752,624, 130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700,572, 206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286, 959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596,230, 830,464,698,1009,932,621,310,855,544,178,778,723,412] [rebalance:info,2014-08-19T16:50:19.917,ns_1@10.242.238.88:<0.4835.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.919,ns_1@10.242.238.88:<0.4835.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.919,ns_1@10.242.238.88:<0.4835.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.920,ns_1@10.242.238.88:<0.2560.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 455 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.921,ns_1@10.242.238.88:<0.2568.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:19.922,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 463 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.922,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 463) [ns_server:debug,2014-08-19T16:50:19.923,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.923,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 450 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:19.923,ns_1@10.242.238.88:<0.4948.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 450 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:50:19.924,ns_1@10.242.238.88:<0.2568.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_455_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.930,ns_1@10.242.238.88:<0.4846.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_966 [rebalance:info,2014-08-19T16:50:19.933,ns_1@10.242.238.88:<0.4846.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[966]}, {checkpoints,[{966,1}]}, {name,<<"rebalance_966">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[966]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"966"}]} [rebalance:debug,2014-08-19T16:50:19.933,ns_1@10.242.238.88:<0.4846.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4952.1> [rebalance:info,2014-08-19T16:50:19.934,ns_1@10.242.238.88:<0.4846.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.937,ns_1@10.242.238.88:<0.4846.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.937,ns_1@10.242.238.88:<0.4846.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.938,ns_1@10.242.238.88:<0.2400.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 966 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:19.942,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.943,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.943,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.943,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{450, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.944,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:19.945,ns_1@10.242.238.88:<0.2408.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.947,ns_1@10.242.238.88:<0.4838.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_460 [rebalance:info,2014-08-19T16:50:19.948,ns_1@10.242.238.88:<0.4838.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[460]}, {checkpoints,[{460,1}]}, {name,<<"rebalance_460">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[460]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"460"}]} [ns_server:info,2014-08-19T16:50:19.949,ns_1@10.242.238.88:<0.2408.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_966_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:50:19.949,ns_1@10.242.238.88:<0.4838.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4962.1> [rebalance:info,2014-08-19T16:50:19.950,ns_1@10.242.238.88:<0.4838.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.952,ns_1@10.242.238.88:<0.4838.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.952,ns_1@10.242.238.88:<0.4838.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.953,ns_1@10.242.238.88:<0.2134.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 460 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.954,ns_1@10.242.238.88:<0.2142.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:19.955,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 450 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 450) [ns_server:debug,2014-08-19T16:50:19.957,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.957,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 708 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:19.957,ns_1@10.242.238.88:<0.4967.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 708 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:19.957,ns_1@10.242.238.88:<0.2142.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_460_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:19.964,ns_1@10.242.238.88:<0.4819.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_707 [rebalance:info,2014-08-19T16:50:19.965,ns_1@10.242.238.88:<0.4819.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[707]}, {checkpoints,[{707,1}]}, {name,<<"rebalance_707">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[707]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"707"}]} [rebalance:debug,2014-08-19T16:50:19.966,ns_1@10.242.238.88:<0.4819.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4969.1> [rebalance:info,2014-08-19T16:50:19.967,ns_1@10.242.238.88:<0.4819.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [views:debug,2014-08-19T16:50:19.974,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/585. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:19.974,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",585,active,0} [rebalance:debug,2014-08-19T16:50:19.974,ns_1@10.242.238.88:<0.4819.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.974,ns_1@10.242.238.88:<0.4819.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:19.976,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:50:19.976,ns_1@10.242.238.88:<0.2862.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 707 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:19.976,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.976,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{708, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.976,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.976,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:19.977,ns_1@10.242.238.88:<0.2870.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:19.980,ns_1@10.242.238.88:<0.2870.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_707_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:19.982,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 708 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:19.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 708) [ns_server:debug,2014-08-19T16:50:19.984,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:19.984,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 461 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:19.984,ns_1@10.242.238.88:<0.4981.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 461 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:19.986,ns_1@10.242.238.88:<0.4817.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_459 [rebalance:info,2014-08-19T16:50:19.987,ns_1@10.242.238.88:<0.4817.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[459]}, {checkpoints,[{459,1}]}, {name,<<"rebalance_459">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[459]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"459"}]} [rebalance:debug,2014-08-19T16:50:19.988,ns_1@10.242.238.88:<0.4817.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4982.1> [rebalance:info,2014-08-19T16:50:19.989,ns_1@10.242.238.88:<0.4817.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:19.991,ns_1@10.242.238.88:<0.4817.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:19.991,ns_1@10.242.238.88:<0.4817.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:19.992,ns_1@10.242.238.88:<0.2219.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 459 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:19.993,ns_1@10.242.238.88:<0.2232.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:19.995,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.996,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:19.996,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{461, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:19.996,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:19.997,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:19.997,ns_1@10.242.238.88:<0.2232.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_459_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:20.002,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 461 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.003,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 461) [ns_server:debug,2014-08-19T16:50:20.003,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.004,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 713 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.004,ns_1@10.242.238.88:<0.4995.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 713 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.005,ns_1@10.242.238.88:<0.4837.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_962 [rebalance:info,2014-08-19T16:50:20.008,ns_1@10.242.238.88:<0.4837.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[962]}, {checkpoints,[{962,1}]}, {name,<<"rebalance_962">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[962]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"962"}]} [rebalance:debug,2014-08-19T16:50:20.009,ns_1@10.242.238.88:<0.4837.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.4996.1> [rebalance:info,2014-08-19T16:50:20.010,ns_1@10.242.238.88:<0.4837.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.011,ns_1@10.242.238.88:<0.4837.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.011,ns_1@10.242.238.88:<0.4837.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.012,ns_1@10.242.238.88:<0.2744.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 962 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.017,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[rebalance:debug,2014-08-19T16:50:20.018,ns_1@10.242.238.88:<0.2758.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.018,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.018,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.018,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.019,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{713, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.022,ns_1@10.242.238.88:<0.2758.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_962_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:20.023,ns_1@10.242.238.88:<0.4834.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_457 [rebalance:info,2014-08-19T16:50:20.025,ns_1@10.242.238.88:<0.4834.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[457]}, {checkpoints,[{457,1}]}, {name,<<"rebalance_457">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[457]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"457"}]} [rebalance:debug,2014-08-19T16:50:20.026,ns_1@10.242.238.88:<0.4834.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5007.1> [rebalance:info,2014-08-19T16:50:20.027,ns_1@10.242.238.88:<0.4834.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.029,ns_1@10.242.238.88:<0.4834.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.029,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 713 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:20.029,ns_1@10.242.238.88:<0.4834.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:20.030,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 713) [rebalance:info,2014-08-19T16:50:20.030,ns_1@10.242.238.88:<0.2365.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 457 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.031,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.031,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 455 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.031,ns_1@10.242.238.88:<0.5010.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 455 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:20.032,ns_1@10.242.238.88:<0.2373.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.035,ns_1@10.242.238.88:<0.2373.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_457_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.041,ns_1@10.242.238.88:<0.4827.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_710 [ns_server:debug,2014-08-19T16:50:20.044,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.044,ns_1@10.242.238.88:<0.4827.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[710]}, {checkpoints,[{710,1}]}, {name,<<"rebalance_710">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[710]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"710"}]} [ns_server:debug,2014-08-19T16:50:20.044,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.044,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.044,ns_1@10.242.238.88:<0.4827.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5015.1> [ns_server:debug,2014-08-19T16:50:20.045,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.045,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{455, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.046,ns_1@10.242.238.88:<0.4827.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.048,ns_1@10.242.238.88:<0.4827.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.048,ns_1@10.242.238.88:<0.4827.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.049,ns_1@10.242.238.88:<0.2616.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 710 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.051,ns_1@10.242.238.88:<0.2624.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:20.051,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 455 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.052,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 455) [ns_server:debug,2014-08-19T16:50:20.053,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.053,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 966 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.053,ns_1@10.242.238.88:<0.5023.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 966 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:50:20.054,ns_1@10.242.238.88:<0.2624.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_710_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.056,ns_1@10.242.238.88:<0.4823.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_704 [rebalance:info,2014-08-19T16:50:20.058,ns_1@10.242.238.88:<0.4823.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[704]}, {checkpoints,[{704,1}]}, {name,<<"rebalance_704">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[704]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"704"}]} [rebalance:debug,2014-08-19T16:50:20.058,ns_1@10.242.238.88:<0.4823.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5027.1> [rebalance:info,2014-08-19T16:50:20.059,ns_1@10.242.238.88:<0.4823.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.061,ns_1@10.242.238.88:<0.4823.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.061,ns_1@10.242.238.88:<0.4823.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.064,ns_1@10.242.238.88:<0.3042.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 704 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.072,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.072,ns_1@10.242.238.88:<0.3050.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.072,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.073,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.073,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.073,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{966, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.073,ns_1@10.242.238.88:<0.4825.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_709 [ns_server:info,2014-08-19T16:50:20.076,ns_1@10.242.238.88:<0.3050.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_704_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:20.087,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 966 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.089,ns_1@10.242.238.88:<0.4818.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_453 [ns_server:debug,2014-08-19T16:50:20.092,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 966) [ns_server:debug,2014-08-19T16:50:20.093,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.093,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 460 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.093,ns_1@10.242.238.88:<0.5054.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 460 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:50:20.093,ns_1@10.242.238.88:<0.4818.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[453]}, {checkpoints,[{453,1}]}, {name,<<"rebalance_453">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[453]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"453"}]} [rebalance:info,2014-08-19T16:50:20.093,ns_1@10.242.238.88:<0.4825.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[709]}, {checkpoints,[{709,1}]}, {name,<<"rebalance_709">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[709]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"709"}]} [rebalance:debug,2014-08-19T16:50:20.094,ns_1@10.242.238.88:<0.4825.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5055.1> [rebalance:debug,2014-08-19T16:50:20.094,ns_1@10.242.238.88:<0.4818.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5056.1> [rebalance:info,2014-08-19T16:50:20.095,ns_1@10.242.238.88:<0.4825.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:20.095,ns_1@10.242.238.88:<0.4818.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:50:20.096,ns_1@10.242.238.88:<0.4825.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.096,ns_1@10.242.238.88:<0.4825.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:50:20.097,ns_1@10.242.238.88:<0.4818.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.097,ns_1@10.242.238.88:<0.4818.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.098,ns_1@10.242.238.88:<0.2694.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 709 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:20.098,ns_1@10.242.238.88:<0.2715.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 453 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.099,ns_1@10.242.238.88:<0.2702.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:20.100,ns_1@10.242.238.88:<0.2723.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.103,ns_1@10.242.238.88:<0.2702.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_709_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:20.103,ns_1@10.242.238.88:<0.2723.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_453_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.105,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.105,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.106,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.106,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.106,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{460, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.108,ns_1@10.242.238.88:<0.4816.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_454 [rebalance:info,2014-08-19T16:50:20.109,ns_1@10.242.238.88:<0.4816.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[454]}, {checkpoints,[{454,1}]}, {name,<<"rebalance_454">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[454]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"454"}]} [rebalance:debug,2014-08-19T16:50:20.110,ns_1@10.242.238.88:<0.4816.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5068.1> [rebalance:info,2014-08-19T16:50:20.111,ns_1@10.242.238.88:<0.4816.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.112,ns_1@10.242.238.88:<0.4816.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.112,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 460 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:20.112,ns_1@10.242.238.88:<0.4816.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:20.113,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 460) [ns_server:debug,2014-08-19T16:50:20.113,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.113,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 707 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.114,ns_1@10.242.238.88:<0.5071.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 707 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:50:20.114,ns_1@10.242.238.88:<0.2637.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 454 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.115,ns_1@10.242.238.88:<0.2645.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.118,ns_1@10.242.238.88:<0.2645.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_454_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.121,ns_1@10.242.238.88:<0.4831.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_961 [rebalance:info,2014-08-19T16:50:20.123,ns_1@10.242.238.88:<0.4831.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[961]}, {checkpoints,[{961,1}]}, {name,<<"rebalance_961">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[961]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"961"}]} [rebalance:debug,2014-08-19T16:50:20.124,ns_1@10.242.238.88:<0.4831.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5074.1> [rebalance:info,2014-08-19T16:50:20.125,ns_1@10.242.238.88:<0.4831.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:20.127,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.127,ns_1@10.242.238.88:<0.4831.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.127,ns_1@10.242.238.88:<0.4831.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:20.127,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.128,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.128,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.128,ns_1@10.242.238.88:<0.2827.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 961 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.128,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{707, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:20.131,ns_1@10.242.238.88:<0.2835.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:20.135,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 707 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:info,2014-08-19T16:50:20.135,ns_1@10.242.238.88:<0.2835.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_961_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:20.135,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 707) [ns_server:debug,2014-08-19T16:50:20.136,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.137,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 459 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.137,ns_1@10.242.238.88:<0.5087.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 459 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:20.138,ns_1@10.242.238.88:<0.4824.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_979 [rebalance:info,2014-08-19T16:50:20.139,ns_1@10.242.238.88:<0.4824.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[979]}, {checkpoints,[{979,1}]}, {name,<<"rebalance_979">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[979]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"979"}]} [rebalance:debug,2014-08-19T16:50:20.140,ns_1@10.242.238.88:<0.4824.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5088.1> [rebalance:info,2014-08-19T16:50:20.141,ns_1@10.242.238.88:<0.4824.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.142,ns_1@10.242.238.88:<0.4824.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.142,ns_1@10.242.238.88:<0.4824.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.143,ns_1@10.242.238.88:<0.1350.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 979 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
[ns_server:debug,2014-08-19T16:50:20.148,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 583. Nacking mccouch update. [views:debug,2014-08-19T16:50:20.148,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/583. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.148,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",583,active,0} [ns_server:debug,2014-08-19T16:50:20.150,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930, 802,747,619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644, 516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903, 775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434, 306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224, 952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666, 538,172,900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925, 797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898, 770,715,587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612, 246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476, 348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790, 735,607,424,296,969,841,658,530,164,892,709,398,270,1020,943,815,760,632,138, 994,866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730, 602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316, 989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392, 264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834, 651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182, 910,782,727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752, 624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700, 572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414, 286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596, 230,830,464,698,1009,932,621,310,855,544,178,778,723,412] 
[rebalance:debug,2014-08-19T16:50:20.152,ns_1@10.242.238.88:<0.1358.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.154,ns_1@10.242.238.88:<0.4844.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_963 [rebalance:info,2014-08-19T16:50:20.155,ns_1@10.242.238.88:<0.4844.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[963]}, {checkpoints,[{963,1}]}, {name,<<"rebalance_963">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[963]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"963"}]} [ns_server:debug,2014-08-19T16:50:20.156,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.156,ns_1@10.242.238.88:<0.4844.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5091.1> [ns_server:debug,2014-08-19T16:50:20.157,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.157,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{459, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.157,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:20.157,ns_1@10.242.238.88:<0.4844.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:20.158,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.159,ns_1@10.242.238.88:<0.4844.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.159,ns_1@10.242.238.88:<0.4844.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:20.160,ns_1@10.242.238.88:<0.1358.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_979_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:20.160,ns_1@10.242.238.88:<0.2659.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 963 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.162,ns_1@10.242.238.88:<0.2667.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.167,ns_1@10.242.238.88:<0.2667.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_963_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:20.167,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 459 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.168,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 459) [ns_server:debug,2014-08-19T16:50:20.168,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.168,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 962 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.169,ns_1@10.242.238.88:<0.5103.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 962 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:20.171,ns_1@10.242.238.88:<0.4820.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_451 [rebalance:info,2014-08-19T16:50:20.172,ns_1@10.242.238.88:<0.4820.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[451]}, {checkpoints,[{451,1}]}, {name,<<"rebalance_451">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[451]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"451"}]} [rebalance:debug,2014-08-19T16:50:20.173,ns_1@10.242.238.88:<0.4820.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5104.1> [rebalance:info,2014-08-19T16:50:20.174,ns_1@10.242.238.88:<0.4820.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.176,ns_1@10.242.238.88:<0.4820.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.176,ns_1@10.242.238.88:<0.4820.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.177,ns_1@10.242.238.88:<0.2883.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 451 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.179,ns_1@10.242.238.88:<0.2896.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.183,ns_1@10.242.238.88:<0.2896.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_451_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.191,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.191,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.191,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.192,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.192,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{962, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.193,ns_1@10.242.238.88:<0.4828.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_960 [rebalance:info,2014-08-19T16:50:20.195,ns_1@10.242.238.88:<0.4828.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[960]}, {checkpoints,[{960,1}]}, {name,<<"rebalance_960">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[960]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"960"}]} [rebalance:debug,2014-08-19T16:50:20.196,ns_1@10.242.238.88:<0.4828.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5115.1> [rebalance:info,2014-08-19T16:50:20.197,ns_1@10.242.238.88:<0.4828.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.198,ns_1@10.242.238.88:<0.4828.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.198,ns_1@10.242.238.88:<0.4828.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.199,ns_1@10.242.238.88:<0.2918.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 960 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.201,ns_1@10.242.238.88:<0.2926.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:20.203,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 962 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.204,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 962) [ns_server:info,2014-08-19T16:50:20.204,ns_1@10.242.238.88:<0.2926.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_960_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:20.205,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.205,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 457 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.205,ns_1@10.242.238.88:<0.5120.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 457 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:20.205,ns_1@10.242.238.88:<0.4830.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_715 [rebalance:info,2014-08-19T16:50:20.206,ns_1@10.242.238.88:<0.4830.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[715]}, {checkpoints,[{715,1}]}, {name,<<"rebalance_715">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[715]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"715"}]} [rebalance:debug,2014-08-19T16:50:20.207,ns_1@10.242.238.88:<0.4830.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5121.1> [rebalance:info,2014-08-19T16:50:20.208,ns_1@10.242.238.88:<0.4830.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [views:debug,2014-08-19T16:50:20.208,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/583. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.209,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",583,active,0} [rebalance:debug,2014-08-19T16:50:20.210,ns_1@10.242.238.88:<0.4830.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.210,ns_1@10.242.238.88:<0.4830.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.211,ns_1@10.242.238.88:<0.2190.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 715 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.212,ns_1@10.242.238.88:<0.2198.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.216,ns_1@10.242.238.88:<0.2198.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_715_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.216,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.217,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.217,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.217,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{457, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.217,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.224,ns_1@10.242.238.88:<0.4829.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_705 [rebalance:info,2014-08-19T16:50:20.226,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 457 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:20.226,ns_1@10.242.238.88:<0.4829.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[705]}, {checkpoints,[{705,1}]}, {name,<<"rebalance_705">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[705]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"705"}]} [ns_server:debug,2014-08-19T16:50:20.227,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 457) [rebalance:debug,2014-08-19T16:50:20.227,ns_1@10.242.238.88:<0.4829.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5133.1> [ns_server:debug,2014-08-19T16:50:20.228,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.228,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 710 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.228,ns_1@10.242.238.88:<0.4829.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:20.228,ns_1@10.242.238.88:<0.5135.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 710 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:20.229,ns_1@10.242.238.88:<0.4829.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.230,ns_1@10.242.238.88:<0.4829.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.231,ns_1@10.242.238.88:<0.3000.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 705 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.232,ns_1@10.242.238.88:<0.3008.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.235,ns_1@10.242.238.88:<0.3008.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_705_'ns_1@10.242.238.91'">>] 
[ns_server:debug,2014-08-19T16:50:20.241,ns_1@10.242.238.88:<0.4841.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_714 [ns_server:debug,2014-08-19T16:50:20.241,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.242,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.242,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.242,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{710, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.242,ns_1@10.242.238.88:<0.4841.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[714]}, {checkpoints,[{714,1}]}, {name,<<"rebalance_714">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[714]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"714"}]} [ns_server:debug,2014-08-19T16:50:20.243,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.243,ns_1@10.242.238.88:<0.4841.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5141.1> [rebalance:info,2014-08-19T16:50:20.245,ns_1@10.242.238.88:<0.4841.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.247,ns_1@10.242.238.88:<0.4841.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.247,ns_1@10.242.238.88:<0.4841.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.248,ns_1@10.242.238.88:<0.2267.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 714 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.253,ns_1@10.242.238.88:<0.2275.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:20.254,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 710 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.254,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 710) [ns_server:debug,2014-08-19T16:50:20.256,ns_1@10.242.238.88:<0.4836.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_968 [ns_server:debug,2014-08-19T16:50:20.256,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.256,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 704 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.256,ns_1@10.242.238.88:<0.5149.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 704 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:20.257,ns_1@10.242.238.88:<0.2275.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_714_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:20.258,ns_1@10.242.238.88:<0.4836.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[968]}, {checkpoints,[{968,1}]}, {name,<<"rebalance_968">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[968]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"968"}]} [rebalance:debug,2014-08-19T16:50:20.259,ns_1@10.242.238.88:<0.4836.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5152.1> [rebalance:info,2014-08-19T16:50:20.259,ns_1@10.242.238.88:<0.4836.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.261,ns_1@10.242.238.88:<0.4836.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.261,ns_1@10.242.238.88:<0.4836.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.263,ns_1@10.242.238.88:<0.2246.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 968 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.269,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.269,ns_1@10.242.238.88:<0.2254.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.269,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.270,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.270,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.270,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{704, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:20.272,ns_1@10.242.238.88:<0.2254.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_968_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:20.274,ns_1@10.242.238.88:<0.4840.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_964 [rebalance:info,2014-08-19T16:50:20.275,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 704 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.275,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 704) [ns_server:debug,2014-08-19T16:50:20.276,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.276,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 709 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.276,ns_1@10.242.238.88:<0.4840.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[964]}, {checkpoints,[{964,1}]}, {name,<<"rebalance_964">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[964]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"964"}]} [rebalance:info,2014-08-19T16:50:20.276,ns_1@10.242.238.88:<0.5164.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 709 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:20.277,ns_1@10.242.238.88:<0.4840.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5165.1> [rebalance:info,2014-08-19T16:50:20.278,ns_1@10.242.238.88:<0.4840.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.279,ns_1@10.242.238.88:<0.4840.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.280,ns_1@10.242.238.88:<0.4840.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.280,ns_1@10.242.238.88:<0.2581.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 964 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.291,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[rebalance:debug,2014-08-19T16:50:20.291,ns_1@10.242.238.88:<0.2589.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.291,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.291,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.292,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{709, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.292,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.294,ns_1@10.242.238.88:<0.4842.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_716 [ns_server:info,2014-08-19T16:50:20.294,ns_1@10.242.238.88:<0.2589.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_964_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:20.295,ns_1@10.242.238.88:<0.4842.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[716]}, {checkpoints,[{716,1}]}, {name,<<"rebalance_716">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[716]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"716"}]} [rebalance:debug,2014-08-19T16:50:20.296,ns_1@10.242.238.88:<0.4842.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5190.1> [rebalance:info,2014-08-19T16:50:20.297,ns_1@10.242.238.88:<0.4842.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:20.298,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 709 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:20.299,ns_1@10.242.238.88:<0.4842.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.299,ns_1@10.242.238.88:<0.4842.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:20.299,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 709) [ns_server:debug,2014-08-19T16:50:20.300,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.300,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 453 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.300,ns_1@10.242.238.88:<0.2108.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 716 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:20.300,ns_1@10.242.238.88:<0.5193.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 453 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:20.302,ns_1@10.242.238.88:<0.2116.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.305,ns_1@10.242.238.88:<0.2116.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_716_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.310,ns_1@10.242.238.88:<0.4826.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_706 [ns_server:debug,2014-08-19T16:50:20.312,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.312,ns_1@10.242.238.88:<0.4826.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[706]}, {checkpoints,[{706,1}]}, {name,<<"rebalance_706">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[706]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"706"}]} [ns_server:debug,2014-08-19T16:50:20.313,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.313,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
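Each takeover above starts with an `ebucketmigrator_srv` "Starting tap stream" entry naming the vbucket, the `rebalance_N` stream, and the source/destination memcached endpoints (port 11209). A hedged sketch for pulling those fields out of the raw log text; the layout is taken from the entries above, and the regex only covers the single-vbucket takeover form shown here.

```python
import re

# Matches entries like:
#   Starting tap stream: [{vbuckets,[706]}, ..., {takeover,true}]
#   {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [...]}
STREAM = re.compile(
    r"Starting tap stream: \[\{vbuckets,\[(?P<vb>\d+)\]\}.*?\{takeover,(?P<takeover>\w+)\}\]\s*"
    r'\{\{"(?P<src>[\d.]+)",(?P<src_port>\d+)\},\s*\{"(?P<dst>[\d.]+)",(?P<dst_port>\d+)\}',
    re.S,
)

def takeover_streams(text):
    """Return (vbucket, source, destination) for every takeover TAP stream in the text."""
    out = []
    for m in STREAM.finditer(text):
        if m.group("takeover") == "true":
            out.append((int(m.group("vb")),
                        f"{m.group('src')}:{m.group('src_port')}",
                        f"{m.group('dst')}:{m.group('dst_port')}"))
    return out

# Usage (hypothetical path): takeover_streams(open("ns_server.debug.log").read())
```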
[rebalance:debug,2014-08-19T16:50:20.313,ns_1@10.242.238.88:<0.4826.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5199.1> [ns_server:debug,2014-08-19T16:50:20.313,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.313,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{453, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.314,ns_1@10.242.238.88:<0.4826.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.316,ns_1@10.242.238.88:<0.4826.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.316,ns_1@10.242.238.88:<0.4826.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.317,ns_1@10.242.238.88:<0.2939.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 706 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.319,ns_1@10.242.238.88:<0.2947.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:20.320,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 453 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.322,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.322,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 454 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.322,ns_1@10.242.238.88:<0.5207.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 454 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:50:20.325,ns_1@10.242.238.88:<0.2947.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_706_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.325,ns_1@10.242.238.88:<0.4845.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_717 [ns_server:debug,2014-08-19T16:50:20.326,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 453) [rebalance:info,2014-08-19T16:50:20.329,ns_1@10.242.238.88:<0.4845.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[717]}, {checkpoints,[{717,1}]}, {name,<<"rebalance_717">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[717]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"717"}]} [rebalance:debug,2014-08-19T16:50:20.329,ns_1@10.242.238.88:<0.4845.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5210.1> [rebalance:info,2014-08-19T16:50:20.330,ns_1@10.242.238.88:<0.4845.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.332,ns_1@10.242.238.88:<0.4845.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.333,ns_1@10.242.238.88:<0.4845.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.333,ns_1@10.242.238.88:<0.2017.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 717 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.334,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.335,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.335,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{454, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.335,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
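The "config change: buckets" entries carry one vbucket-map delta per completed move. Reading the Erlang term, each map element appears to be {VBucket, OldChain, NewChain}, where the first node of a chain holds the active copy, later nodes hold replicas, and 'undefined' marks an unfilled replica slot. A hedged rendering of that reading as plain data; the field labels below are mine, not ns_server terminology.

```python
# Hypothetical Python rendering of the delta logged above for vbucket 454:
#   {454, ['ns_1@10.242.238.88', undefined], ['ns_1@10.242.238.89', 'ns_1@10.242.238.91']}
move_454 = {
    "vbucket": 454,
    # chain before the move: active copy on .88, replica slot unfilled
    "old_chain": ["ns_1@10.242.238.88", None],
    # chain after the move: active on .89, replica on .91
    "new_chain": ["ns_1@10.242.238.89", "ns_1@10.242.238.91"],
}

def active_and_replicas(chain):
    """Split a chain into (active, replicas), dropping unfilled (None) slots."""
    return chain[0], [node for node in chain[1:] if node is not None]

print(active_and_replicas(move_454["new_chain"]))
# -> ('ns_1@10.242.238.89', ['ns_1@10.242.238.91'])
```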
[ns_server:debug,2014-08-19T16:50:20.335,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.335,ns_1@10.242.238.88:<0.2025.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.339,ns_1@10.242.238.88:<0.2025.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_717_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:20.346,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 454 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.346,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 454) [ns_server:debug,2014-08-19T16:50:20.346,ns_1@10.242.238.88:<0.4850.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_969 [ns_server:debug,2014-08-19T16:50:20.347,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.347,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 961 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.347,ns_1@10.242.238.88:<0.5223.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 961 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:20.349,ns_1@10.242.238.88:<0.4850.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[969]}, {checkpoints,[{969,1}]}, {name,<<"rebalance_969">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[969]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"969"}]} [rebalance:debug,2014-08-19T16:50:20.349,ns_1@10.242.238.88:<0.4850.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5224.1> [rebalance:info,2014-08-19T16:50:20.350,ns_1@10.242.238.88:<0.4850.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:20.350,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 581. Nacking mccouch update. [views:debug,2014-08-19T16:50:20.350,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/581. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.351,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",581,active,0} [rebalance:debug,2014-08-19T16:50:20.352,ns_1@10.242.238.88:<0.4850.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.352,ns_1@10.242.238.88:<0.4850.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:20.352,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,957,829,646,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854, 671,488,360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930, 802,747,619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644, 516,150,878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903, 775,720,592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434, 306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 510,382,1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224, 952,824,641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666, 538,172,900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925, 797,742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898, 770,715,587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612, 246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476, 348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790, 735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,396,268,1018,941,813,758,630,136,992,864,681, 498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940, 812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654, 526,160,888,705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785, 730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444, 316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703, 392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962, 834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548, 182,910,782,727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807, 752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883, 700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597, 414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907, 596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412] [rebalance:info,2014-08-19T16:50:20.354,ns_1@10.242.238.88:<0.2155.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 969 state 
change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.356,ns_1@10.242.238.88:<0.2163.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.361,ns_1@10.242.238.88:<0.2163.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_969_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:20.364,ns_1@10.242.238.88:<0.4815.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_458 [rebalance:info,2014-08-19T16:50:20.366,ns_1@10.242.238.88:<0.4815.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[458]}, {checkpoints,[{458,1}]}, {name,<<"rebalance_458">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[458]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"458"}]} [rebalance:debug,2014-08-19T16:50:20.367,ns_1@10.242.238.88:<0.4815.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5227.1> [rebalance:info,2014-08-19T16:50:20.368,ns_1@10.242.238.88:<0.4815.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:20.369,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.369,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.369,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.370,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.370,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{961, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:20.370,ns_1@10.242.238.88:<0.4815.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.370,ns_1@10.242.238.88:<0.4815.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.371,ns_1@10.242.238.88:<0.2288.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 458 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.373,ns_1@10.242.238.88:<0.2296.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:20.376,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 961 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:info,2014-08-19T16:50:20.376,ns_1@10.242.238.88:<0.2296.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_458_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.376,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 961) [ns_server:debug,2014-08-19T16:50:20.377,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.377,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 979 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.377,ns_1@10.242.238.88:<0.5240.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 979 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:20.380,ns_1@10.242.238.88:<0.4843.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_970 [rebalance:info,2014-08-19T16:50:20.381,ns_1@10.242.238.88:<0.4843.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[970]}, {checkpoints,[{970,1}]}, {name,<<"rebalance_970">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[970]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"970"}]} [rebalance:debug,2014-08-19T16:50:20.382,ns_1@10.242.238.88:<0.4843.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5241.1> [rebalance:info,2014-08-19T16:50:20.387,ns_1@10.242.238.88:<0.4843.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.389,ns_1@10.242.238.88:<0.4843.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.389,ns_1@10.242.238.88:<0.4843.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.390,ns_1@10.242.238.88:<0.2073.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 970 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.392,ns_1@10.242.238.88:<0.2081.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
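The `janitor_agent` entries record every per-node vbucket state change as a tuple whose first two elements are the node and the new state. A small sketch, again assuming one entry per line in the raw file, that tallies how many vbuckets each node was switched to active or replica over a stretch of log.

```python
import re
from collections import Counter

# Matches janitor_agent entries like:
#   Doing vbucket 970 state change: {'ns_1@10.242.238.91',active,undefined, undefined}
STATE_CHANGE = re.compile(
    r"Doing vbucket (?P<vb>\d+) state change:\s*\{'(?P<node>[^']+)',(?P<state>\w+),",
    re.S,
)

def state_change_tally(text):
    """Count (node, new_state) pairs across all logged single-vbucket state changes."""
    return Counter((m.group("node"), m.group("state"))
                   for m in STATE_CHANGE.finditer(text))

# Example with a fragment shaped like the entries above:
sample = ("Doing vbucket 970 state change: {'ns_1@10.242.238.91',active,undefined, undefined} "
          "Doing vbucket 979 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'}")
print(state_change_tally(sample))
# Counter({('ns_1@10.242.238.91', 'active'): 1, ('ns_1@10.242.238.90', 'replica'): 1})
```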
[ns_server:debug,2014-08-19T16:50:20.393,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{979, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.394,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:20.396,ns_1@10.242.238.88:<0.2081.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_970_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:20.401,ns_1@10.242.238.88:<0.4813.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_456 [rebalance:info,2014-08-19T16:50:20.402,ns_1@10.242.238.88:<0.4813.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[456]}, {checkpoints,[{456,1}]}, {name,<<"rebalance_456">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[456]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"456"}]} [rebalance:debug,2014-08-19T16:50:20.403,ns_1@10.242.238.88:<0.4813.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5251.1> [rebalance:info,2014-08-19T16:50:20.404,ns_1@10.242.238.88:<0.4813.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:20.405,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 979 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:20.406,ns_1@10.242.238.88:<0.4813.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.406,ns_1@10.242.238.88:<0.4813.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:20.406,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 979) [ns_server:debug,2014-08-19T16:50:20.407,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.407,ns_1@10.242.238.88:<0.2483.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 456 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:20.407,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 963 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.407,ns_1@10.242.238.88:<0.5254.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 963 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:20.408,ns_1@10.242.238.88:<0.2491.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.411,ns_1@10.242.238.88:<0.2491.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_456_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.416,ns_1@10.242.238.88:<0.4848.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_967 [views:debug,2014-08-19T16:50:20.418,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/581. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",581,active,0} [rebalance:info,2014-08-19T16:50:20.417,ns_1@10.242.238.88:<0.4848.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[967]}, {checkpoints,[{967,1}]}, {name,<<"rebalance_967">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[967]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"967"}]} [rebalance:debug,2014-08-19T16:50:20.419,ns_1@10.242.238.88:<0.4848.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5258.1> [rebalance:info,2014-08-19T16:50:20.420,ns_1@10.242.238.88:<0.4848.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.422,ns_1@10.242.238.88:<0.4848.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.422,ns_1@10.242.238.88:<0.4848.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.422,ns_1@10.242.238.88:<0.2323.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 967 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.423,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.423,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.423,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.424,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.424,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{963, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:20.424,ns_1@10.242.238.88:<0.2331.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.427,ns_1@10.242.238.88:<0.2331.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_967_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:20.430,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 963 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.430,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 963) [ns_server:debug,2014-08-19T16:50:20.431,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.431,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 451 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.431,ns_1@10.242.238.88:<0.5270.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 451 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:20.435,ns_1@10.242.238.88:<0.4821.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_712 [rebalance:info,2014-08-19T16:50:20.436,ns_1@10.242.238.88:<0.4821.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[712]}, {checkpoints,[{712,1}]}, {name,<<"rebalance_712">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[712]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"712"}]} [rebalance:debug,2014-08-19T16:50:20.437,ns_1@10.242.238.88:<0.4821.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5272.1> [rebalance:info,2014-08-19T16:50:20.437,ns_1@10.242.238.88:<0.4821.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.439,ns_1@10.242.238.88:<0.4821.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.439,ns_1@10.242.238.88:<0.4821.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.439,ns_1@10.242.238.88:<0.2421.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 712 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.441,ns_1@10.242.238.88:<0.2429.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.444,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.445,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:20.445,ns_1@10.242.238.88:<0.2429.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_712_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.445,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{451, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.445,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.447,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.450,ns_1@10.242.238.88:<0.4839.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_965 [rebalance:info,2014-08-19T16:50:20.454,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 451 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:20.454,ns_1@10.242.238.88:<0.4839.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[965]}, {checkpoints,[{965,1}]}, {name,<<"rebalance_965">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[965]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"965"}]} [ns_server:debug,2014-08-19T16:50:20.454,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 451) [rebalance:debug,2014-08-19T16:50:20.455,ns_1@10.242.238.88:<0.4839.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5284.1> [ns_server:debug,2014-08-19T16:50:20.455,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.455,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 960 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.455,ns_1@10.242.238.88:<0.4839.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:20.455,ns_1@10.242.238.88:<0.5286.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 960 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:20.457,ns_1@10.242.238.88:<0.4839.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.457,ns_1@10.242.238.88:<0.4839.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.458,ns_1@10.242.238.88:<0.2504.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 965 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.460,ns_1@10.242.238.88:<0.2512.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.463,ns_1@10.242.238.88:<0.2512.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_965_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:20.468,ns_1@10.242.238.88:<0.4847.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_711 [rebalance:info,2014-08-19T16:50:20.471,ns_1@10.242.238.88:<0.4847.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[711]}, {checkpoints,[{711,1}]}, {name,<<"rebalance_711">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[711]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"711"}]} [rebalance:debug,2014-08-19T16:50:20.471,ns_1@10.242.238.88:<0.4847.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5289.1> [rebalance:info,2014-08-19T16:50:20.472,ns_1@10.242.238.88:<0.4847.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:50:20.474,ns_1@10.242.238.88:<0.4847.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.474,ns_1@10.242.238.88:<0.4847.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.475,ns_1@10.242.238.88:<0.2539.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 711 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:20.477,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:20.478,ns_1@10.242.238.88:<0.2547.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:20.478,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.479,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.479,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{960, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.479,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:20.481,ns_1@10.242.238.88:<0.2547.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_711_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.482,ns_1@10.242.238.88:<0.4814.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_449 [rebalance:info,2014-08-19T16:50:20.484,ns_1@10.242.238.88:<0.4814.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[449]}, {checkpoints,[{449,1}]}, {name,<<"rebalance_449">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[449]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"449"}]} [rebalance:debug,2014-08-19T16:50:20.485,ns_1@10.242.238.88:<0.4814.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5300.1> [rebalance:info,2014-08-19T16:50:20.490,ns_1@10.242.238.88:<0.4814.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:20.490,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 960 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.491,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 960) [ns_server:debug,2014-08-19T16:50:20.492,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:50:20.492,ns_1@10.242.238.88:<0.4814.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.492,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 715 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.492,ns_1@10.242.238.88:<0.4814.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.492,ns_1@10.242.238.88:<0.5303.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 715 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:50:20.493,ns_1@10.242.238.88:<0.3021.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 449 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.495,ns_1@10.242.238.88:<0.3029.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.498,ns_1@10.242.238.88:<0.3029.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_449_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:20.507,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.507,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.508,ns_1@10.242.238.88:<0.4849.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_971 [ns_server:debug,2014-08-19T16:50:20.508,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{715, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.508,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.509,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.509,ns_1@10.242.238.88:<0.4849.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[971]}, {checkpoints,[{971,1}]}, {name,<<"rebalance_971">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[971]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"971"}]} [rebalance:debug,2014-08-19T16:50:20.511,ns_1@10.242.238.88:<0.4849.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.5322.1> [rebalance:info,2014-08-19T16:50:20.512,ns_1@10.242.238.88:<0.4849.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:20.514,ns_1@10.242.238.88:<0.4849.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:20.514,ns_1@10.242.238.88:<0.4849.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:20.515,ns_1@10.242.238.88:<0.1996.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 971 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:20.517,ns_1@10.242.238.88:<0.2004.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:20.521,ns_1@10.242.238.88:<0.2004.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_971_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:20.523,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 715 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.524,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 715) [ns_server:debug,2014-08-19T16:50:20.525,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.525,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 705 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.526,ns_1@10.242.238.88:<0.5333.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 705 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.539,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.540,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.540,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.540,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{705, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.546,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 705 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.546,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 705) [ns_server:debug,2014-08-19T16:50:20.547,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.547,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 714 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.547,ns_1@10.242.238.88:<0.5343.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 714 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.554,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 579. Nacking mccouch update. [views:debug,2014-08-19T16:50:20.554,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/579. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.554,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",579,active,0} [ns_server:debug,2014-08-19T16:50:20.556,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488, 360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747, 619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150, 878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720, 592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979, 851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742, 614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328, 873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715, 587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730, 602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316, 989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,392, 264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834, 651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182, 910,782,727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807,752, 624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883,700, 572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414, 286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907,596, 230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957,646] [ns_server:debug,2014-08-19T16:50:20.562,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.562,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
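`capi_set_view_manager` periodically dumps its "Usable vbuckets" set, as in the entry above. Since this bucket is configured with {num_vbuckets,1024}, a quick hedged sketch can report which of 0..1023 are absent from such a dump (mid-rebalance the list is not expected to be complete, so gaps are informational rather than errors).

```python
import re

def usable_vbuckets(entry_text, num_vbuckets=1024):
    """Given one 'Usable vbuckets:' log entry, return (present, missing) vbucket sets."""
    # Keep only the Erlang integer list that follows the marker, up to its closing ']'.
    listing = entry_text.split("Usable vbuckets:", 1)[1]
    listing = listing.split("]", 1)[0]
    present = {int(n) for n in re.findall(r"\d+", listing)}
    missing = set(range(num_vbuckets)) - present
    return present, missing

# Example with a shortened fragment shaped like the entry above (not the full list):
frag = "Usable vbuckets:\n[933,622,856,490,779,724,958,647,336,881]"
present, missing = usable_vbuckets(frag, num_vbuckets=1024)
print(len(present), "listed,", len(missing), "not listed")
```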
[ns_server:debug,2014-08-19T16:50:20.563,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.563,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{714, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.564,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.575,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 714 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.575,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 714) [ns_server:debug,2014-08-19T16:50:20.576,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.576,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 968 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.576,ns_1@10.242.238.88:<0.5354.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 968 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:20.597,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.597,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.598,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.598,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.598,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{968, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.603,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 968 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.604,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 968) [ns_server:debug,2014-08-19T16:50:20.605,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.605,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 964 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.605,ns_1@10.242.238.88:<0.5365.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 964 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:50:20.613,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/579. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.613,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",579,active,0} [ns_server:debug,2014-08-19T16:50:20.623,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.624,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.624,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.624,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.624,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{964, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.630,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 964 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.631,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 964) [ns_server:debug,2014-08-19T16:50:20.632,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.632,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 716 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.632,ns_1@10.242.238.88:<0.5376.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 716 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.646,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.646,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.646,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.647,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.647,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{716, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.654,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 716 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.654,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 716) [ns_server:debug,2014-08-19T16:50:20.655,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.655,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 706 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.655,ns_1@10.242.238.88:<0.5400.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 706 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.672,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.672,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.673,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.673,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.673,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{706, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.684,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 706 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.684,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 706) [ns_server:debug,2014-08-19T16:50:20.685,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.686,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 717 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.686,ns_1@10.242.238.88:<0.5411.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 717 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.688,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 577. Nacking mccouch update. [views:debug,2014-08-19T16:50:20.688,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/577. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.689,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",577,active,0} [ns_server:debug,2014-08-19T16:50:20.690,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488, 360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747, 619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150, 878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720, 592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979, 851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742, 614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328, 873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715, 587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785, 730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444, 316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703, 392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962, 834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548, 182,910,782,727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935,807, 752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883, 700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597, 414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362,907, 596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957,646] [ns_server:debug,2014-08-19T16:50:20.698,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.699,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.699,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{717, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.699,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.699,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.704,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 717 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.705,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 717) [ns_server:debug,2014-08-19T16:50:20.706,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.706,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 969 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.706,ns_1@10.242.238.88:<0.5422.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 969 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:50:20.722,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/577. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.722,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",577,active,0} [ns_server:debug,2014-08-19T16:50:20.740,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.741,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.741,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{969, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.742,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.742,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:50:20.754,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 969 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.755,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 969) [ns_server:debug,2014-08-19T16:50:20.755,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.755,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 458 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.756,ns_1@10.242.238.88:<0.5434.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 458 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:20.773,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.774,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.774,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{458, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.774,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.774,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.780,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 458 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.781,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 458) [ns_server:debug,2014-08-19T16:50:20.781,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.782,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 970 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.782,ns_1@10.242.238.88:<0.5458.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 970 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:20.797,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 575. Nacking mccouch update. [views:debug,2014-08-19T16:50:20.797,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/575. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.798,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",575,active,0} [ns_server:debug,2014-08-19T16:50:20.799,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488, 360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747, 619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150, 878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720, 592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979, 851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742, 614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328, 873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715, 587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785, 730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444, 316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703, 575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234, 962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,390,262,1012,935, 807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338, 883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725, 597,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673,362, 907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957,646] [ns_server:debug,2014-08-19T16:50:20.805,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.805,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:20.806,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{970, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.806,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.806,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.813,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 970 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.814,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 970) [ns_server:debug,2014-08-19T16:50:20.814,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.814,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 456 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.814,ns_1@10.242.238.88:<0.5469.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 456 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:20.827,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.828,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.828,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.828,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{456, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.829,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.839,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 456 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.839,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 456) [ns_server:debug,2014-08-19T16:50:20.840,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.840,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 967 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.841,ns_1@10.242.238.88:<0.5480.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 967 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:50:20.857,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/575. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:20.857,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",575,active,0} [ns_server:debug,2014-08-19T16:50:20.860,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.861,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.862,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.862,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.862,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{967, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.876,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 967 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.877,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.877,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 712 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.877,ns_1@10.242.238.88:<0.5491.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 712 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.878,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 967) [ns_server:debug,2014-08-19T16:50:20.891,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.892,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{712, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.892,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.892,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.893,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.907,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 712 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.908,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 712) [ns_server:debug,2014-08-19T16:50:20.908,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.908,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 965 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.908,ns_1@10.242.238.88:<0.5502.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 965 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:20.929,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.930,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:20.930,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.930,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.930,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{965, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:20.940,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 965 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.941,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 965) [ns_server:debug,2014-08-19T16:50:20.942,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:20.942,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 711 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.942,ns_1@10.242.238.88:<0.5527.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 711 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:20.956,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.957,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.957,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{711, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.957,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.957,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.968,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 711 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 711) [ns_server:debug,2014-08-19T16:50:20.969,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:20.969,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 449 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.969,ns_1@10.242.238.88:<0.5539.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 449 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:20.981,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.982,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:20.982,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{449, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:20.982,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:20.983,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:20.988,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 449 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:20.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 449) [ns_server:debug,2014-08-19T16:50:20.990,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:20.990,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 971 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:20.990,ns_1@10.242.238.88:<0.5551.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 971 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:21.007,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 573. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.007,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/573. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.007,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",573,active,0} [ns_server:debug,2014-08-19T16:50:21.009,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488, 360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747, 619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150, 878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720, 592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979, 851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742, 614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328, 873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715, 587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785, 730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444, 316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703, 575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234, 962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012, 935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466, 338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780, 725,597,414,286,959,831,648,520,154,882,699,388,260,1010,805,750,128,984,673, 362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957,646] [ns_server:debug,2014-08-19T16:50:21.010,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:21.011,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{971, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:21.011,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:21.011,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:21.012,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:21.018,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 971 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:21.019,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 971) [ns_server:debug,2014-08-19T16:50:21.020,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.91'},{compact,'ns_1@10.242.238.88'}] [ns_server:debug,2014-08-19T16:50:21.020,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:21.021,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:50:21.022,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:50:21.022,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:21.022,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:50:21.025,ns_1@10.242.238.88:<0.5563.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:50:21.025,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:50:21.025,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.88'} [ns_server:debug,2014-08-19T16:50:21.025,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:50:21.028,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.028,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.5564.1>) [ns_server:debug,2014-08-19T16:50:21.028,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 703) [ns_server:debug,2014-08-19T16:50:21.028,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:21.028,ns_1@10.242.238.88:<0.5564.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 703 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.028,ns_1@10.242.238.88:<0.5570.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 703 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.029,ns_1@10.242.238.88:<0.5571.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 703 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.029,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:21.039,ns_1@10.242.238.88:<0.5572.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 703 into 'ns_1@10.242.238.91' is <18126.24590.0> [ns_server:debug,2014-08-19T16:50:21.041,ns_1@10.242.238.88:<0.5572.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 703 into 'ns_1@10.242.238.90' is <18125.22765.0> [rebalance:debug,2014-08-19T16:50:21.041,ns_1@10.242.238.88:<0.5564.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 703 is <0.5572.1> [ns_server:debug,2014-08-19T16:50:21.043,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:21.044,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:50:21.066,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/573. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.066,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",573,active,0} [ns_server:debug,2014-08-19T16:50:21.080,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,71123}, tap_estimate, {replica_building,"default",703,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24590.0>, <<"replication_building_703_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.093,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,84362}, tap_estimate, {replica_building,"default",703,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22765.0>, <<"replication_building_703_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.094,ns_1@10.242.238.88:<0.5573.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22765.0>}, {'ns_1@10.242.238.91',<18126.24590.0>}]) [rebalance:info,2014-08-19T16:50:21.094,ns_1@10.242.238.88:<0.5564.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:21.094,ns_1@10.242.238.88:<0.5564.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 703 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.095,ns_1@10.242.238.88:<0.5564.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.096,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.099,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:21.099,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.5585.1>) [ns_server:debug,2014-08-19T16:50:21.100,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 959) [ns_server:debug,2014-08-19T16:50:21.100,ns_1@10.242.238.88:<0.5586.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [rebalance:info,2014-08-19T16:50:21.100,ns_1@10.242.238.88:<0.5585.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 959 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.100,ns_1@10.242.238.88:<0.5591.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 959 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.100,ns_1@10.242.238.88:<0.5592.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 959 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.104,ns_1@10.242.238.88:<0.5598.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 959 into 'ns_1@10.242.238.90' is <18125.22771.0> [ns_server:debug,2014-08-19T16:50:21.106,ns_1@10.242.238.88:<0.5598.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 959 into 'ns_1@10.242.238.91' is <18126.24595.0> [rebalance:debug,2014-08-19T16:50:21.106,ns_1@10.242.238.88:<0.5585.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 959 is <0.5598.1> [ns_server:debug,2014-08-19T16:50:21.138,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,129951}, tap_estimate, {replica_building,"default",959,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22771.0>, <<"replication_building_959_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.152,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,143278}, tap_estimate, {replica_building,"default",959,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24595.0>, <<"replication_building_959_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.152,ns_1@10.242.238.88:<0.5607.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24595.0>}, {'ns_1@10.242.238.90',<18125.22771.0>}]) [rebalance:info,2014-08-19T16:50:21.153,ns_1@10.242.238.88:<0.5585.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:21.153,ns_1@10.242.238.88:<0.5585.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 959 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.154,ns_1@10.242.238.88:<0.5585.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.154,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:21.158,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.158,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.5625.1>) [ns_server:debug,2014-08-19T16:50:21.158,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 447) [ns_server:debug,2014-08-19T16:50:21.159,ns_1@10.242.238.88:<0.5626.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [rebalance:info,2014-08-19T16:50:21.159,ns_1@10.242.238.88:<0.5625.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 447 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.159,ns_1@10.242.238.88:<0.5631.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 447 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.159,ns_1@10.242.238.88:<0.5632.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 447 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.163,ns_1@10.242.238.88:<0.5633.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 447 into 'ns_1@10.242.238.91' is <18126.24615.0> [ns_server:debug,2014-08-19T16:50:21.164,ns_1@10.242.238.88:<0.5633.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 447 into 'ns_1@10.242.238.89' is <18124.27107.0> [rebalance:debug,2014-08-19T16:50:21.164,ns_1@10.242.238.88:<0.5625.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 447 is <0.5633.1> [ns_server:debug,2014-08-19T16:50:21.167,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 571. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.167,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/571. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.168,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",571,active,0} [ns_server:debug,2014-08-19T16:50:21.169,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,880,697,386,258,1008,931,803,748,620,254,126,982,854,671,488, 360,905,777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747, 619,436,308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150, 878,695,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720, 592,226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979, 851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742, 614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328, 873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715, 587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785, 730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444, 316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703, 575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234, 962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012, 935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466, 338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780, 725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984, 673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957, 646] [ns_server:debug,2014-08-19T16:50:21.196,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,187798}, tap_estimate, {replica_building,"default",447,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24615.0>, <<"replication_building_447_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:21.215,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,206067}, tap_estimate, {replica_building,"default",447,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27107.0>, <<"replication_building_447_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:21.215,ns_1@10.242.238.88:<0.5634.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27107.0>}, {'ns_1@10.242.238.91',<18126.24615.0>}]) [rebalance:info,2014-08-19T16:50:21.215,ns_1@10.242.238.88:<0.5625.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:21.216,ns_1@10.242.238.88:<0.5625.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 447 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.217,ns_1@10.242.238.88:<0.5625.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.217,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.221,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:21.221,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.5646.1>) [ns_server:debug,2014-08-19T16:50:21.222,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 958) [ns_server:debug,2014-08-19T16:50:21.222,ns_1@10.242.238.88:<0.5647.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.222,ns_1@10.242.238.88:<0.5647.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:21.223,ns_1@10.242.238.88:<0.5646.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 958 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.223,ns_1@10.242.238.88:<0.5652.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 958 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.223,ns_1@10.242.238.88:<0.5653.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 958 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [views:debug,2014-08-19T16:50:21.226,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/571. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.227,ns_1@10.242.238.88:<0.5654.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 958 into 'ns_1@10.242.238.90' is <18125.22790.0> [ns_server:debug,2014-08-19T16:50:21.227,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",571,active,0} [ns_server:debug,2014-08-19T16:50:21.228,ns_1@10.242.238.88:<0.5654.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 958 into 'ns_1@10.242.238.91' is <18126.24620.0> [rebalance:debug,2014-08-19T16:50:21.228,ns_1@10.242.238.88:<0.5646.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 958 is <0.5654.1> [ns_server:debug,2014-08-19T16:50:21.260,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,251072}, tap_estimate, {replica_building,"default",958,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22790.0>, <<"replication_building_958_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.274,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,265289}, tap_estimate, {replica_building,"default",958,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24620.0>, <<"replication_building_958_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.274,ns_1@10.242.238.88:<0.5655.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24620.0>}, {'ns_1@10.242.238.90',<18125.22790.0>}]) [rebalance:info,2014-08-19T16:50:21.274,ns_1@10.242.238.88:<0.5646.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:21.275,ns_1@10.242.238.88:<0.5646.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 958 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.276,ns_1@10.242.238.88:<0.5646.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.276,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:21.280,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.280,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.5667.1>) [ns_server:debug,2014-08-19T16:50:21.280,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 702) [ns_server:debug,2014-08-19T16:50:21.281,ns_1@10.242.238.88:<0.5668.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.281,ns_1@10.242.238.88:<0.5668.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:21.281,ns_1@10.242.238.88:<0.5667.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 702 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.281,ns_1@10.242.238.88:<0.5673.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 702 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.281,ns_1@10.242.238.88:<0.5674.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 702 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.285,ns_1@10.242.238.88:<0.5675.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 702 into 'ns_1@10.242.238.91' is <18126.24626.0> [ns_server:debug,2014-08-19T16:50:21.287,ns_1@10.242.238.88:<0.5675.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 702 into 'ns_1@10.242.238.90' is <18125.22795.0> [rebalance:debug,2014-08-19T16:50:21.287,ns_1@10.242.238.88:<0.5667.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 702 is <0.5675.1> [ns_server:debug,2014-08-19T16:50:21.317,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,308423}, tap_estimate, {replica_building,"default",702,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24626.0>, <<"replication_building_702_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.332,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,323193}, tap_estimate, {replica_building,"default",702,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22795.0>, <<"replication_building_702_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.332,ns_1@10.242.238.88:<0.5676.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22795.0>}, {'ns_1@10.242.238.91',<18126.24626.0>}]) [rebalance:info,2014-08-19T16:50:21.332,ns_1@10.242.238.88:<0.5667.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:21.333,ns_1@10.242.238.88:<0.5667.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 702 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.333,ns_1@10.242.238.88:<0.5667.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.334,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.338,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.338,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.5702.1>) [ns_server:debug,2014-08-19T16:50:21.338,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 446) [ns_server:debug,2014-08-19T16:50:21.339,ns_1@10.242.238.88:<0.5703.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.339,ns_1@10.242.238.88:<0.5703.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:21.339,ns_1@10.242.238.88:<0.5702.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 446 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.339,ns_1@10.242.238.88:<0.5708.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 446 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.339,ns_1@10.242.238.88:<0.5709.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 446 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.343,ns_1@10.242.238.88:<0.5710.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 446 into 'ns_1@10.242.238.91' is <18126.24631.0> [ns_server:debug,2014-08-19T16:50:21.343,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 569. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.343,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/569. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.343,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",569,active,0} [ns_server:debug,2014-08-19T16:50:21.345,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,803,748,620,254,126,982,854,671,488,360,905, 777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436, 308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695, 384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226, 954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668, 540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,510,382,1004,927, 799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458, 330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772, 717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742,614,248, 120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404, 276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118,974,846, 663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194, 922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819, 764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661,478,350, 895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424,296, 
969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866,683, 500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019,942, 814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839,656, 528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498,370,915, 787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757,629, 446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160,888, 705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730,602, 236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316,989, 861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,575,392, 264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962,834, 651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,182, 910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807, 752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338,883, 700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597, 414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362, 907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880, 569,258] [ns_server:debug,2014-08-19T16:50:21.346,ns_1@10.242.238.88:<0.5710.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 446 into 'ns_1@10.242.238.89' is <18124.27127.0> [rebalance:debug,2014-08-19T16:50:21.346,ns_1@10.242.238.88:<0.5702.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 446 is <0.5710.1> [ns_server:debug,2014-08-19T16:50:21.375,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,366768}, tap_estimate, {replica_building,"default",446,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24631.0>, <<"replication_building_446_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.392,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,383879}, tap_estimate, {replica_building,"default",446,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27127.0>, <<"replication_building_446_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:21.393,ns_1@10.242.238.88:<0.5711.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27127.0>}, {'ns_1@10.242.238.91',<18126.24631.0>}]) [rebalance:info,2014-08-19T16:50:21.393,ns_1@10.242.238.88:<0.5702.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:21.394,ns_1@10.242.238.88:<0.5702.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 446 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.394,ns_1@10.242.238.88:<0.5702.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.395,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.399,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] 
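Each vbucket handled above (447, 958, 702, 446, 957, ...) runs through the same sequence of entries: the single vbucket mover is spawned, the move start is noted, the destination nodes get replica/passive states, ebucketmigrator replica builders are started, tap estimates are observed, backfill is determined ("Had backfill rvs"), indexing is initiated on the first destination, a replication persistence checkpoint id is fetched, the mover waits for checkpoint 1, and backfill is noted as done before the next move action is handed out. The following checklist sketch uses marker strings taken verbatim from the entries above to show how far a given vbucket got; the function and the marker list are mine, not Couchbase code.

# Phase markers, in the order they appear for each vbucket in this log.
EXPECTED_ORDER = [
    "Spawned single vbucket mover",
    "Noted vbucket move start",
    "Doing bulk vbucket",                       # janitor_agent:bulk_set_vbucket_state
    "Replica building ebucketmigrator",
    "Seeing tap_estimate",
    "Had backfill rvs",
    "Doing initiate_indexing call",
    "Doing get_replication_persistence_checkpoint_id call",
    "Will wait for checkpoint",
    "noted backfill done",
]

def phases_seen(entries):
    """entries: the log entries for one vbucket, in log order.
    Returns the phase markers encountered; a short list hints at where
    a move is stuck."""
    seen = []
    for entry in entries:
        for marker in EXPECTED_ORDER:
            if marker in entry and marker not in seen:
                seen.append(marker)
    return seen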
[rebalance:debug,2014-08-19T16:50:21.399,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.5731.1>) [ns_server:debug,2014-08-19T16:50:21.399,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 957) [ns_server:debug,2014-08-19T16:50:21.399,ns_1@10.242.238.88:<0.5732.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.400,ns_1@10.242.238.88:<0.5732.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:21.400,ns_1@10.242.238.88:<0.5731.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 957 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.400,ns_1@10.242.238.88:<0.5737.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 957 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.400,ns_1@10.242.238.88:<0.5738.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 957 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.404,ns_1@10.242.238.88:<0.5739.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 957 into 'ns_1@10.242.238.90' is <18125.22815.0> [ns_server:debug,2014-08-19T16:50:21.406,ns_1@10.242.238.88:<0.5739.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 957 into 'ns_1@10.242.238.91' is <18126.24650.0> [rebalance:debug,2014-08-19T16:50:21.406,ns_1@10.242.238.88:<0.5731.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 957 is <0.5739.1> [views:debug,2014-08-19T16:50:21.419,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/569. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.419,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",569,active,0} [ns_server:debug,2014-08-19T16:50:21.439,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,429986}, tap_estimate, {replica_building,"default",957,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22815.0>, <<"replication_building_957_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.450,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,441780}, tap_estimate, {replica_building,"default",957,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24650.0>, <<"replication_building_957_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.451,ns_1@10.242.238.88:<0.5740.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24650.0>}, {'ns_1@10.242.238.90',<18125.22815.0>}]) [rebalance:info,2014-08-19T16:50:21.451,ns_1@10.242.238.88:<0.5731.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:21.452,ns_1@10.242.238.88:<0.5731.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 957 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.452,ns_1@10.242.238.88:<0.5731.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.452,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:21.456,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.456,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.5754.1>) [ns_server:debug,2014-08-19T16:50:21.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 701) [ns_server:debug,2014-08-19T16:50:21.457,ns_1@10.242.238.88:<0.5755.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.457,ns_1@10.242.238.88:<0.5755.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:21.457,ns_1@10.242.238.88:<0.5754.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 701 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.457,ns_1@10.242.238.88:<0.5760.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 701 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.457,ns_1@10.242.238.88:<0.5761.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 701 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.461,ns_1@10.242.238.88:<0.5762.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 701 into 'ns_1@10.242.238.91' is <18126.24656.0> [ns_server:debug,2014-08-19T16:50:21.464,ns_1@10.242.238.88:<0.5762.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 701 into 'ns_1@10.242.238.90' is <18125.22826.0> [rebalance:debug,2014-08-19T16:50:21.464,ns_1@10.242.238.88:<0.5754.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 701 is <0.5762.1> [ns_server:debug,2014-08-19T16:50:21.494,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,485483}, tap_estimate, {replica_building,"default",701,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24656.0>, <<"replication_building_701_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.508,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,499545}, tap_estimate, {replica_building,"default",701,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22826.0>, <<"replication_building_701_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.509,ns_1@10.242.238.88:<0.5763.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22826.0>}, {'ns_1@10.242.238.91',<18126.24656.0>}]) [rebalance:info,2014-08-19T16:50:21.509,ns_1@10.242.238.88:<0.5754.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:21.509,ns_1@10.242.238.88:<0.5754.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 701 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.510,ns_1@10.242.238.88:<0.5754.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.511,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.514,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.514,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.5789.1>) [ns_server:debug,2014-08-19T16:50:21.515,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 445) [ns_server:debug,2014-08-19T16:50:21.515,ns_1@10.242.238.88:<0.5790.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.515,ns_1@10.242.238.88:<0.5790.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:21.515,ns_1@10.242.238.88:<0.5789.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 445 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.515,ns_1@10.242.238.88:<0.5795.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 445 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.515,ns_1@10.242.238.88:<0.5796.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 445 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.519,ns_1@10.242.238.88:<0.5797.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 445 into 'ns_1@10.242.238.91' is <18126.24661.0> [ns_server:debug,2014-08-19T16:50:21.522,ns_1@10.242.238.88:<0.5797.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 445 into 'ns_1@10.242.238.89' is <18124.27147.0> [rebalance:debug,2014-08-19T16:50:21.522,ns_1@10.242.238.88:<0.5789.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 445 is <0.5797.1> [ns_server:debug,2014-08-19T16:50:21.553,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,544005}, tap_estimate, {replica_building,"default",445,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24661.0>, <<"replication_building_445_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.568,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,559015}, tap_estimate, {replica_building,"default",445,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27147.0>, <<"replication_building_445_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:21.568,ns_1@10.242.238.88:<0.5798.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27147.0>}, {'ns_1@10.242.238.91',<18126.24661.0>}]) [rebalance:info,2014-08-19T16:50:21.568,ns_1@10.242.238.88:<0.5789.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:21.569,ns_1@10.242.238.88:<0.5789.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 445 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.570,ns_1@10.242.238.88:<0.5789.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.570,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.574,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] 
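Every "Got actions" entry hands ns_vbucket_mover a single {move,{VBucket,OldChain,NewChain}} tuple; throughout this stretch the old chain is ['ns_1@10.242.238.88',undefined] and the new chains are pairs drawn from nodes .89, .90 and .91, so active copies are being moved off ns_1@10.242.238.88 with a fresh replica placed on a second node. A sketch for collecting these tuples from the log text follows (it also picks them up from the "noted backfill done" entries, which print the same tuple); the regex and function names are assumptions of mine.

import re
from collections import Counter

MOVE_RE = re.compile(
    r"\{move,\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}\}", re.DOTALL)

def moves(log_text):
    """Yield (vbucket, old_chain, new_chain) for every {move,...} tuple
    in the text (both 'Got actions' and 'noted backfill done' print it)."""
    def chain(s):
        return [n.strip().strip("'") for n in s.split(",")]
    for vb, old, new in MOVE_RE.findall(log_text):
        yield int(vb), chain(old), chain(new)

def destination_counts(log_text):
    """How often each new chain shows up: a quick view of where the
    vbuckets leaving this node are going."""
    return Counter(tuple(new) for _, _, new in moves(log_text))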
[rebalance:debug,2014-08-19T16:50:21.574,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.5810.1>) [ns_server:debug,2014-08-19T16:50:21.574,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 956) [ns_server:debug,2014-08-19T16:50:21.575,ns_1@10.242.238.88:<0.5811.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.575,ns_1@10.242.238.88:<0.5811.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:21.575,ns_1@10.242.238.88:<0.5810.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 956 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.575,ns_1@10.242.238.88:<0.5816.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 956 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.575,ns_1@10.242.238.88:<0.5817.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 956 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.579,ns_1@10.242.238.88:<0.5818.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 956 into 'ns_1@10.242.238.90' is <18125.22833.0> [ns_server:debug,2014-08-19T16:50:21.581,ns_1@10.242.238.88:<0.5818.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 956 into 'ns_1@10.242.238.91' is <18126.24688.0> [rebalance:debug,2014-08-19T16:50:21.581,ns_1@10.242.238.88:<0.5810.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 956 is <0.5818.1> [ns_server:debug,2014-08-19T16:50:21.586,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 567. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.586,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/567. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.586,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",567,active,0} [ns_server:debug,2014-08-19T16:50:21.587,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,803,748,620,254,126,982,854,671,488,360,905, 777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436, 308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695, 567,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851, 668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,510,382,1004, 927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641, 458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900, 772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742,614, 248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873, 690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587, 404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118,974, 846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424, 296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839, 656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498,370, 915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812,757, 629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526,160, 888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785,730, 602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444,316, 989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703,575, 392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234,962, 834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548, 182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012,935, 807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466,338, 883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725, 597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673, 362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957,646, 880,569,258] [ns_server:debug,2014-08-19T16:50:21.611,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,602720}, tap_estimate, {replica_building,"default",956,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22833.0>, <<"replication_building_956_'ns_1@10.242.238.90'">>} 
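capi_set_view_manager reprints its full "Usable vbuckets" list every time a set_vbucket event changes the set (in the snapshot above, vbucket 567 has just joined a list that previously gained 569). With lists this long the change is easiest to see by diffing consecutive snapshots; here is a small sketch that does so, assuming the whole log is available as one string (the regex and names are mine).

import re

VBLIST_RE = re.compile(r"Usable vbuckets:\s*\[([0-9,\s]+)\]")

def usable_snapshots(log_text):
    """Successive 'Usable vbuckets' lists as Python sets."""
    return [set(int(x) for x in m.group(1).split(","))
            for m in VBLIST_RE.finditer(log_text)]

def newly_usable(log_text):
    """vbuckets added between consecutive snapshots, in log order."""
    snaps = usable_snapshots(log_text)
    return [sorted(b - a) for a, b in zip(snaps, snaps[1:])]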
[ns_server:debug,2014-08-19T16:50:21.625,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,616934}, tap_estimate, {replica_building,"default",956,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24688.0>, <<"replication_building_956_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.626,ns_1@10.242.238.88:<0.5820.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24688.0>}, {'ns_1@10.242.238.90',<18125.22833.0>}]) [rebalance:info,2014-08-19T16:50:21.626,ns_1@10.242.238.88:<0.5810.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:21.627,ns_1@10.242.238.88:<0.5810.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 956 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.627,ns_1@10.242.238.88:<0.5810.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.628,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:21.632,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.632,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.5832.1>) [ns_server:debug,2014-08-19T16:50:21.632,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 700) [ns_server:debug,2014-08-19T16:50:21.632,ns_1@10.242.238.88:<0.5833.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.632,ns_1@10.242.238.88:<0.5833.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:21.632,ns_1@10.242.238.88:<0.5832.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 700 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.633,ns_1@10.242.238.88:<0.5838.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 700 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.633,ns_1@10.242.238.88:<0.5839.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 700 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.637,ns_1@10.242.238.88:<0.5840.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 700 into 'ns_1@10.242.238.91' is <18126.24696.0> [ns_server:debug,2014-08-19T16:50:21.639,ns_1@10.242.238.88:<0.5840.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 700 into 'ns_1@10.242.238.90' is <18125.22852.0> [rebalance:debug,2014-08-19T16:50:21.639,ns_1@10.242.238.88:<0.5832.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 700 is <0.5840.1> [views:debug,2014-08-19T16:50:21.645,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/567. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",567,active,0} [ns_server:debug,2014-08-19T16:50:21.669,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,660721}, tap_estimate, {replica_building,"default",700,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24696.0>, <<"replication_building_700_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.686,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,677170}, tap_estimate, {replica_building,"default",700,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22852.0>, <<"replication_building_700_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.686,ns_1@10.242.238.88:<0.5841.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22852.0>}, {'ns_1@10.242.238.91',<18126.24696.0>}]) [rebalance:info,2014-08-19T16:50:21.686,ns_1@10.242.238.88:<0.5832.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:21.687,ns_1@10.242.238.88:<0.5832.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 700 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.688,ns_1@10.242.238.88:<0.5832.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.688,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.692,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:21.692,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.5853.1>) [ns_server:debug,2014-08-19T16:50:21.693,ns_1@10.242.238.88:<0.5854.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.693,ns_1@10.242.238.88:<0.5854.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:21.693,ns_1@10.242.238.88:<0.5853.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 444 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.693,ns_1@10.242.238.88:<0.5859.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 444 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.693,ns_1@10.242.238.88:<0.5860.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 444 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.696,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 444) [ns_server:debug,2014-08-19T16:50:21.699,ns_1@10.242.238.88:<0.5861.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 444 into 'ns_1@10.242.238.91' is <18126.24701.0> [ns_server:debug,2014-08-19T16:50:21.701,ns_1@10.242.238.88:<0.5861.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 444 into 'ns_1@10.242.238.89' is <18124.27167.0> [rebalance:debug,2014-08-19T16:50:21.701,ns_1@10.242.238.88:<0.5853.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 444 is <0.5861.1> [ns_server:debug,2014-08-19T16:50:21.732,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,723141}, tap_estimate, {replica_building,"default",444,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24701.0>, <<"replication_building_444_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.745,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 565. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.745,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/565. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.745,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",565,active,0} [ns_server:debug,2014-08-19T16:50:21.746,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,737614}, tap_estimate, {replica_building,"default",444,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27167.0>, <<"replication_building_444_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:21.747,ns_1@10.242.238.88:<0.5862.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27167.0>}, {'ns_1@10.242.238.91',<18126.24701.0>}]) [rebalance:info,2014-08-19T16:50:21.747,ns_1@10.242.238.88:<0.5853.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:21.747,ns_1@10.242.238.88:<0.5853.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 444 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.748,ns_1@10.242.238.88:<0.5853.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.747,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,803,748,620,254,126,982,854,671,488,360,905, 777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436, 308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695, 567,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851, 668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,508,380,1002,925,797,742, 614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328, 873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715, 587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246,118, 974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688, 560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274, 947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 
160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785, 730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444, 316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703, 575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234, 962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012, 935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466, 338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780, 725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984, 673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957, 646,880,569,258] [ns_server:debug,2014-08-19T16:50:21.748,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.752,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:21.752,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.5888.1>) [ns_server:debug,2014-08-19T16:50:21.752,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 955) [ns_server:debug,2014-08-19T16:50:21.753,ns_1@10.242.238.88:<0.5889.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.753,ns_1@10.242.238.88:<0.5889.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:21.753,ns_1@10.242.238.88:<0.5888.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 955 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.753,ns_1@10.242.238.88:<0.5894.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 955 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.753,ns_1@10.242.238.88:<0.5895.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 955 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.757,ns_1@10.242.238.88:<0.5896.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 955 into 'ns_1@10.242.238.90' is <18125.22872.0> [ns_server:debug,2014-08-19T16:50:21.760,ns_1@10.242.238.88:<0.5896.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 955 into 'ns_1@10.242.238.91' is <18126.24720.0> [rebalance:debug,2014-08-19T16:50:21.760,ns_1@10.242.238.88:<0.5888.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 955 is <0.5896.1> [ns_server:debug,2014-08-19T16:50:21.791,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,782356}, tap_estimate, {replica_building,"default",955,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22872.0>, <<"replication_building_955_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:50:21.804,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/565. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.804,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",565,active,0} [ns_server:debug,2014-08-19T16:50:21.804,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,795708}, tap_estimate, {replica_building,"default",955,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24720.0>, <<"replication_building_955_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.805,ns_1@10.242.238.88:<0.5897.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24720.0>}, {'ns_1@10.242.238.90',<18125.22872.0>}]) [rebalance:info,2014-08-19T16:50:21.805,ns_1@10.242.238.88:<0.5888.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:21.805,ns_1@10.242.238.88:<0.5888.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 955 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.806,ns_1@10.242.238.88:<0.5888.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.807,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:21.810,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
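The pattern around vbuckets 569, 567, 565 and 563 above is: mc_connection adds a _local/vbuuid document into the vbucket and nacks the mccouch update, capi_set_view_manager logs the set_vbucket event and the mc_couch_event is signalled, and shortly afterwards the same set_vbucket event is logged and signalled a second time, presumably the retried update after the nack (that reading is an inference from these entries, not something the log states outright). A sketch for listing the signalled events with their timestamps, using a regex and names of my own devising:

import re

SET_VB_RE = re.compile(
    r'\[ns_server:debug,([^,]+),[^\]]+\]Signaled mc_couch_event: '
    r'\{set_vbucket,"([^"]+)",(\d+),(\w+),(\d+)\}')

def signalled_set_vbucket(log_text):
    """(timestamp, bucket, vbucket, state, trailing int) for each
    'Signaled mc_couch_event: {set_vbucket,...}' entry; the meaning of the
    trailing integer (0 here) is not spelled out in the log."""
    return [(ts, b, int(vb), state, int(n))
            for ts, b, vb, state, n in SET_VB_RE.findall(log_text)]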
[rebalance:debug,2014-08-19T16:50:21.810,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.5909.1>) [ns_server:debug,2014-08-19T16:50:21.811,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 699) [ns_server:debug,2014-08-19T16:50:21.811,ns_1@10.242.238.88:<0.5910.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.811,ns_1@10.242.238.88:<0.5910.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:21.811,ns_1@10.242.238.88:<0.5909.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 699 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.811,ns_1@10.242.238.88:<0.5915.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 699 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.812,ns_1@10.242.238.88:<0.5916.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 699 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.815,ns_1@10.242.238.88:<0.5917.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 699 into 'ns_1@10.242.238.91' is <18126.24726.0> [ns_server:debug,2014-08-19T16:50:21.817,ns_1@10.242.238.88:<0.5917.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 699 into 'ns_1@10.242.238.90' is <18125.22877.0> [rebalance:debug,2014-08-19T16:50:21.818,ns_1@10.242.238.88:<0.5909.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 699 is <0.5917.1> [ns_server:debug,2014-08-19T16:50:21.848,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,839303}, tap_estimate, {replica_building,"default",699,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24726.0>, <<"replication_building_699_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.863,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,854694}, tap_estimate, {replica_building,"default",699,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22877.0>, <<"replication_building_699_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.864,ns_1@10.242.238.88:<0.5918.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22877.0>}, {'ns_1@10.242.238.91',<18126.24726.0>}]) [rebalance:info,2014-08-19T16:50:21.864,ns_1@10.242.238.88:<0.5909.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:21.864,ns_1@10.242.238.88:<0.5909.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 699 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.865,ns_1@10.242.238.88:<0.5909.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:21.866,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.869,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:21.870,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.5938.1>) [ns_server:debug,2014-08-19T16:50:21.870,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 443) [ns_server:debug,2014-08-19T16:50:21.870,ns_1@10.242.238.88:<0.5939.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.870,ns_1@10.242.238.88:<0.5939.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:21.870,ns_1@10.242.238.88:<0.5938.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 443 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.871,ns_1@10.242.238.88:<0.5948.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 443 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.871,ns_1@10.242.238.88:<0.5949.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 443 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.874,ns_1@10.242.238.88:<0.5952.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 443 into 'ns_1@10.242.238.91' is <18126.24731.0> [ns_server:debug,2014-08-19T16:50:21.877,ns_1@10.242.238.88:<0.5952.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 443 into 'ns_1@10.242.238.89' is <18124.27193.0> [rebalance:debug,2014-08-19T16:50:21.877,ns_1@10.242.238.88:<0.5938.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 443 is <0.5952.1> [ns_server:debug,2014-08-19T16:50:21.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 563. Nacking mccouch update. [views:debug,2014-08-19T16:50:21.904,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/563. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",563,active,0} [ns_server:debug,2014-08-19T16:50:21.906,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,803,748,620,254,126,982,854,671,488,360,905, 777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436, 308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695, 567,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851, 668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797, 742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456, 328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770, 715,587,404,276,949,821,766,638,144,872,689,506,378,1000,923,795,740,612,246, 118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871, 688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402, 274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972,844, 661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817, 762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940, 812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654, 526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913, 785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627, 444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886, 703,575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600, 234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859, 676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262, 1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908, 780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128, 984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412, 957,646,880,569,258] [ns_server:debug,2014-08-19T16:50:21.909,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,900095}, tap_estimate, {replica_building,"default",443,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24731.0>, <<"replication_building_443_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:21.923,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,914253}, tap_estimate, {replica_building,"default",443,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27193.0>, <<"replication_building_443_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:21.923,ns_1@10.242.238.88:<0.5953.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27193.0>}, {'ns_1@10.242.238.91',<18126.24731.0>}]) [rebalance:info,2014-08-19T16:50:21.924,ns_1@10.242.238.88:<0.5938.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:21.924,ns_1@10.242.238.88:<0.5938.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 443 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.925,ns_1@10.242.238.88:<0.5938.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.925,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:21.929,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:21.929,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.5965.1>) [ns_server:debug,2014-08-19T16:50:21.929,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 954) [ns_server:debug,2014-08-19T16:50:21.930,ns_1@10.242.238.88:<0.5966.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.930,ns_1@10.242.238.88:<0.5966.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:21.930,ns_1@10.242.238.88:<0.5965.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 954 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.930,ns_1@10.242.238.88:<0.5971.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 954 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.930,ns_1@10.242.238.88:<0.5972.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 954 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.934,ns_1@10.242.238.88:<0.5973.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 954 into 'ns_1@10.242.238.90' is <18125.22897.0> [ns_server:debug,2014-08-19T16:50:21.937,ns_1@10.242.238.88:<0.5973.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 954 into 'ns_1@10.242.238.91' is <18126.24736.0> [rebalance:debug,2014-08-19T16:50:21.937,ns_1@10.242.238.88:<0.5965.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 954 is <0.5973.1> [views:debug,2014-08-19T16:50:21.963,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/563. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:21.963,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",563,active,0} [ns_server:debug,2014-08-19T16:50:21.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,958949}, tap_estimate, {replica_building,"default",954,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22897.0>, <<"replication_building_954_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:21.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452621,973704}, tap_estimate, {replica_building,"default",954,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24736.0>, <<"replication_building_954_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:21.983,ns_1@10.242.238.88:<0.5974.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24736.0>}, {'ns_1@10.242.238.90',<18125.22897.0>}]) [rebalance:info,2014-08-19T16:50:21.983,ns_1@10.242.238.88:<0.5965.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:21.983,ns_1@10.242.238.88:<0.5965.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 954 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:21.984,ns_1@10.242.238.88:<0.5965.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:21.984,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:21.988,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:21.988,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.5986.1>) [ns_server:debug,2014-08-19T16:50:21.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 698) [ns_server:debug,2014-08-19T16:50:21.989,ns_1@10.242.238.88:<0.5987.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:21.989,ns_1@10.242.238.88:<0.5987.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:21.989,ns_1@10.242.238.88:<0.5986.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 698 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:21.989,ns_1@10.242.238.88:<0.5992.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 698 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:21.990,ns_1@10.242.238.88:<0.5993.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 698 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:21.993,ns_1@10.242.238.88:<0.5994.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 698 into 'ns_1@10.242.238.91' is <18126.24756.0> [ns_server:debug,2014-08-19T16:50:21.996,ns_1@10.242.238.88:<0.5994.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 698 into 'ns_1@10.242.238.90' is <18125.22916.0> [rebalance:debug,2014-08-19T16:50:21.996,ns_1@10.242.238.88:<0.5986.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 698 is <0.5994.1> [ns_server:debug,2014-08-19T16:50:22.026,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,17633}, tap_estimate, {replica_building,"default",698,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24756.0>, <<"replication_building_698_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.041,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,32239}, tap_estimate, {replica_building,"default",698,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22916.0>, <<"replication_building_698_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.041,ns_1@10.242.238.88:<0.5995.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22916.0>}, {'ns_1@10.242.238.91',<18126.24756.0>}]) [rebalance:info,2014-08-19T16:50:22.041,ns_1@10.242.238.88:<0.5986.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:22.042,ns_1@10.242.238.88:<0.5986.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 698 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.042,ns_1@10.242.238.88:<0.5986.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
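The records above repeat one vbucket-move lifecycle per vbucket (move start, bulk state change, replica builders spawned, tap_estimate, backfill determination, initiate_indexing, wait for checkpoint, backfill done). A minimal sketch of pulling that lifecycle out of a log like this one, assuming only the message fragments visible in this excerpt (the patterns are not an official log grammar, the script name is made up, and records wrapped across lines would need re-joining before matching):

#!/usr/bin/env python3
"""Sketch: trace per-vbucket move progress out of an ns_server debug log.

Only the message fragments visible in the excerpt above are assumed; this is
not an official log grammar.
"""
import re
import sys
from collections import defaultdict

# Local timestamp as printed in each record header, e.g. 2014-08-19T16:50:22.173
TS = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}")

# (event label, pattern capturing the vbucket id) -- inferred from the messages above.
EVENTS = [
    ("move_start",      re.compile(r"Noted vbucket move start \(vbucket (\d+)\)")),
    ("bulk_set_state",  re.compile(r"Doing bulk vbucket (\d+) state change")),
    ("replica_builder", re.compile(r"Replica building ebucketmigrator for vbucket (\d+)")),
    ("tap_estimate",    re.compile(r"replication_building_(\d+)_")),
    ("backfill_done",   re.compile(r"noted backfill done: \{move,\{(\d+),")),
]

def trace(path):
    """Return {vbucket id: [(timestamp, event), ...]} in log order."""
    timeline = defaultdict(list)
    with open(path, errors="replace") as f:
        for line in f:
            ts = TS.search(line)
            stamp = ts.group(0) if ts else "?"
            for label, pattern in EVENTS:
                m = pattern.search(line)
                if m:
                    timeline[int(m.group(1))].append((stamp, label))
    return timeline

if __name__ == "__main__":
    for vb, events in sorted(trace(sys.argv[1]).items()):
        print(vb, " -> ".join(f"{label}@{stamp}" for stamp, label in events))

Run against a full log, a trace like this makes it easy to spot a vbucket whose move starts but never reports backfill done.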
[ns_server:debug,2014-08-19T16:50:22.043,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.047,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.047,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6021.1>) [ns_server:debug,2014-08-19T16:50:22.047,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 442) [ns_server:debug,2014-08-19T16:50:22.047,ns_1@10.242.238.88:<0.6022.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.047,ns_1@10.242.238.88:<0.6022.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:22.048,ns_1@10.242.238.88:<0.6021.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 442 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.048,ns_1@10.242.238.88:<0.6027.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 442 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.048,ns_1@10.242.238.88:<0.6028.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 442 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.052,ns_1@10.242.238.88:<0.6029.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 442 into 'ns_1@10.242.238.91' is <18126.24761.0> [ns_server:debug,2014-08-19T16:50:22.054,ns_1@10.242.238.88:<0.6029.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 442 into 'ns_1@10.242.238.89' is <18124.27200.0> [rebalance:debug,2014-08-19T16:50:22.054,ns_1@10.242.238.88:<0.6021.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 442 is <0.6029.1> [ns_server:debug,2014-08-19T16:50:22.084,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 561. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.084,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/561. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.084,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",561,active,0} [ns_server:debug,2014-08-19T16:50:22.086,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,77189}, tap_estimate, {replica_building,"default",442,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24761.0>, <<"replication_building_442_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.086,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,803,748,620,254,126,982,854,671,488,360,905, 777,722,594,228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436, 308,981,853,670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695, 567,384,256,1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592, 226,954,826,643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851, 668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382, 1004,927,799,744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824, 641,458,330,875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172, 900,772,717,589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797, 742,614,248,120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456, 328,873,690,562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770, 715,587,404,276,949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612, 246,118,974,846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585, 402,274,947,819,764,636,142,998,870,687,504,376,921,793,738,610,244,116,972, 844,661,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558, 192,920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945, 817,762,634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476, 348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790, 735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017, 940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837, 654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368, 913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755, 627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158, 886,703,575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728, 600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987, 859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390, 262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832, 649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180, 908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750, 128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723, 412,957,646,880,569,258] 
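The periodic "Usable vbuckets:" dumps from capi_set_view_manager differ from one another only by a few ids; over this stretch each dump picks up the vbucket from the set_vbucket/active event signaled just before it (561, then 559, 557 and 555). A minimal sketch that diffs successive dumps, assuming only that each dump is the bracketed integer list printed after the literal text "Usable vbuckets:" (the script name is hypothetical):

#!/usr/bin/env python3
"""Sketch: diff successive "Usable vbuckets:" dumps from capi_set_view_manager.

Assumes only that each dump is the bracketed integer list printed after the
literal text "Usable vbuckets:"; nothing here is an official log format.
"""
import re
import sys

DUMP = re.compile(r"Usable vbuckets:\s*\[([\d,\s]+)\]")

def dumps(text):
    """Yield each dump as a set of vbucket ids, in the order they were logged."""
    for match in DUMP.finditer(text):
        yield {int(item) for item in match.group(1).split(",") if item.strip()}

if __name__ == "__main__":
    text = open(sys.argv[1], errors="replace").read()
    previous = None
    for i, current in enumerate(dumps(text)):
        if previous is not None:
            added = sorted(current - previous)
            removed = sorted(previous - current)
            print(f"dump {i}: +{added} -{removed} (now {len(current)} usable)")
        previous = current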
[ns_server:debug,2014-08-19T16:50:22.104,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,95245}, tap_estimate, {replica_building,"default",442,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27200.0>, <<"replication_building_442_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:22.104,ns_1@10.242.238.88:<0.6030.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27200.0>}, {'ns_1@10.242.238.91',<18126.24761.0>}]) [rebalance:info,2014-08-19T16:50:22.104,ns_1@10.242.238.88:<0.6021.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:22.105,ns_1@10.242.238.88:<0.6021.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 442 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.106,ns_1@10.242.238.88:<0.6021.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.106,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.110,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:22.110,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6042.1>) [ns_server:debug,2014-08-19T16:50:22.110,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 953) [ns_server:debug,2014-08-19T16:50:22.110,ns_1@10.242.238.88:<0.6043.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.111,ns_1@10.242.238.88:<0.6043.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:22.111,ns_1@10.242.238.88:<0.6042.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 953 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.111,ns_1@10.242.238.88:<0.6048.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 953 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.111,ns_1@10.242.238.88:<0.6049.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 953 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.116,ns_1@10.242.238.88:<0.6050.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 953 into 'ns_1@10.242.238.90' is <18125.22936.0> [ns_server:debug,2014-08-19T16:50:22.118,ns_1@10.242.238.88:<0.6050.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 953 into 'ns_1@10.242.238.91' is <18126.24780.0> [rebalance:debug,2014-08-19T16:50:22.118,ns_1@10.242.238.88:<0.6042.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 953 is <0.6050.1> [views:debug,2014-08-19T16:50:22.143,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/561. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.143,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",561,active,0} [ns_server:debug,2014-08-19T16:50:22.148,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,139967}, tap_estimate, {replica_building,"default",953,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22936.0>, <<"replication_building_953_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.166,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,157854}, tap_estimate, {replica_building,"default",953,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24780.0>, <<"replication_building_953_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.167,ns_1@10.242.238.88:<0.6051.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24780.0>}, {'ns_1@10.242.238.90',<18125.22936.0>}]) [rebalance:info,2014-08-19T16:50:22.167,ns_1@10.242.238.88:<0.6042.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:22.168,ns_1@10.242.238.88:<0.6042.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 953 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.168,ns_1@10.242.238.88:<0.6042.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.169,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:22.173,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:22.173,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6063.1>) [ns_server:debug,2014-08-19T16:50:22.173,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 697) [ns_server:debug,2014-08-19T16:50:22.173,ns_1@10.242.238.88:<0.6064.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.173,ns_1@10.242.238.88:<0.6064.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:22.173,ns_1@10.242.238.88:<0.6063.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 697 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.174,ns_1@10.242.238.88:<0.6069.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 697 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.174,ns_1@10.242.238.88:<0.6070.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 697 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.178,ns_1@10.242.238.88:<0.6071.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 697 into 'ns_1@10.242.238.91' is <18126.24800.0> [ns_server:debug,2014-08-19T16:50:22.181,ns_1@10.242.238.88:<0.6071.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 697 into 'ns_1@10.242.238.90' is <18125.22941.0> [rebalance:debug,2014-08-19T16:50:22.181,ns_1@10.242.238.88:<0.6063.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 697 is <0.6071.1> [ns_server:debug,2014-08-19T16:50:22.211,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,202718}, tap_estimate, {replica_building,"default",697,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24800.0>, <<"replication_building_697_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 559. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.218,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/559. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.219,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",559,active,0} [ns_server:debug,2014-08-19T16:50:22.220,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,620,254,982,854,671,488,360,905,777,722,594, 228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853, 670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174, 902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799, 744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330, 875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717, 589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404, 276,949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974, 846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913,785, 730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627,444, 316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886,703, 575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600,234, 962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012, 935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649,466, 338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780, 725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984, 673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412,957, 646,880,569,258,803,748,126] [ns_server:debug,2014-08-19T16:50:22.226,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,217286}, tap_estimate, {replica_building,"default",697,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22941.0>, <<"replication_building_697_'ns_1@10.242.238.90'">>} 
[ns_server:debug,2014-08-19T16:50:22.226,ns_1@10.242.238.88:<0.6080.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22941.0>}, {'ns_1@10.242.238.91',<18126.24800.0>}]) [rebalance:info,2014-08-19T16:50:22.226,ns_1@10.242.238.88:<0.6063.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:22.227,ns_1@10.242.238.88:<0.6063.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 697 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.228,ns_1@10.242.238.88:<0.6063.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.228,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.232,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.232,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6098.1>) [ns_server:debug,2014-08-19T16:50:22.232,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 441) [ns_server:debug,2014-08-19T16:50:22.233,ns_1@10.242.238.88:<0.6099.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.233,ns_1@10.242.238.88:<0.6099.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:22.233,ns_1@10.242.238.88:<0.6098.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 441 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.233,ns_1@10.242.238.88:<0.6104.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 441 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.233,ns_1@10.242.238.88:<0.6105.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 441 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.237,ns_1@10.242.238.88:<0.6106.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 441 into 'ns_1@10.242.238.91' is <18126.24805.0> [ns_server:debug,2014-08-19T16:50:22.239,ns_1@10.242.238.88:<0.6106.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 441 into 'ns_1@10.242.238.89' is <18124.27220.0> [rebalance:debug,2014-08-19T16:50:22.239,ns_1@10.242.238.88:<0.6098.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 441 is <0.6106.1> [views:debug,2014-08-19T16:50:22.252,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/559. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.252,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",559,active,0} [ns_server:debug,2014-08-19T16:50:22.270,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,261707}, tap_estimate, {replica_building,"default",441,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24805.0>, <<"replication_building_441_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.284,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,275589}, tap_estimate, {replica_building,"default",441,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27220.0>, <<"replication_building_441_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:22.285,ns_1@10.242.238.88:<0.6107.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27220.0>}, {'ns_1@10.242.238.91',<18126.24805.0>}]) [rebalance:info,2014-08-19T16:50:22.285,ns_1@10.242.238.88:<0.6098.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:22.286,ns_1@10.242.238.88:<0.6098.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 441 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.286,ns_1@10.242.238.88:<0.6098.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.287,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.291,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:22.291,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6127.1>) [ns_server:debug,2014-08-19T16:50:22.291,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 952) [ns_server:debug,2014-08-19T16:50:22.291,ns_1@10.242.238.88:<0.6128.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.291,ns_1@10.242.238.88:<0.6128.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:22.291,ns_1@10.242.238.88:<0.6127.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 952 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.292,ns_1@10.242.238.88:<0.6133.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 952 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.292,ns_1@10.242.238.88:<0.6134.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 952 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.296,ns_1@10.242.238.88:<0.6140.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 952 into 'ns_1@10.242.238.90' is <18125.22961.0> [ns_server:debug,2014-08-19T16:50:22.298,ns_1@10.242.238.88:<0.6140.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 952 into 'ns_1@10.242.238.91' is <18126.24824.0> [rebalance:debug,2014-08-19T16:50:22.298,ns_1@10.242.238.88:<0.6127.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 952 is <0.6140.1> [ns_server:debug,2014-08-19T16:50:22.327,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 557. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.328,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/557. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",557,active,0} [ns_server:debug,2014-08-19T16:50:22.328,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,319872}, tap_estimate, {replica_building,"default",952,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22961.0>, <<"replication_building_952_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.329,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,620,254,982,854,671,488,360,905,777,722,594, 228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853, 670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174, 902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799, 744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330, 875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717, 589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404, 276,949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974, 846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 
478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940, 812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654, 526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368,913, 785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627, 444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886, 703,575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728,600, 234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859, 676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390,262, 1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832,649, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180,908, 780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128, 984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723,412, 957,646,880,569,258,803,748,126] [ns_server:debug,2014-08-19T16:50:22.344,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,335254}, tap_estimate, {replica_building,"default",952,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24824.0>, <<"replication_building_952_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.344,ns_1@10.242.238.88:<0.6142.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24824.0>}, {'ns_1@10.242.238.90',<18125.22961.0>}]) [rebalance:info,2014-08-19T16:50:22.344,ns_1@10.242.238.88:<0.6127.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:22.345,ns_1@10.242.238.88:<0.6127.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 952 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.345,ns_1@10.242.238.88:<0.6127.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.346,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:22.350,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.350,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6154.1>) [ns_server:debug,2014-08-19T16:50:22.350,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket 
move start (vbucket 696) [ns_server:debug,2014-08-19T16:50:22.350,ns_1@10.242.238.88:<0.6155.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.350,ns_1@10.242.238.88:<0.6155.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:22.351,ns_1@10.242.238.88:<0.6154.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 696 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.351,ns_1@10.242.238.88:<0.6160.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 696 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.351,ns_1@10.242.238.88:<0.6161.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 696 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.355,ns_1@10.242.238.88:<0.6162.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 696 into 'ns_1@10.242.238.91' is <18126.24830.0> [ns_server:debug,2014-08-19T16:50:22.357,ns_1@10.242.238.88:<0.6162.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 696 into 'ns_1@10.242.238.90' is <18125.22966.0> [rebalance:debug,2014-08-19T16:50:22.357,ns_1@10.242.238.88:<0.6154.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 696 is <0.6162.1> [views:debug,2014-08-19T16:50:22.361,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/557. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.362,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",557,active,0} [ns_server:debug,2014-08-19T16:50:22.387,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,378660}, tap_estimate, {replica_building,"default",696,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24830.0>, <<"replication_building_696_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.406,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,397323}, tap_estimate, {replica_building,"default",696,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22966.0>, <<"replication_building_696_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.406,ns_1@10.242.238.88:<0.6163.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22966.0>}, {'ns_1@10.242.238.91',<18126.24830.0>}]) [rebalance:info,2014-08-19T16:50:22.406,ns_1@10.242.238.88:<0.6154.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:22.407,ns_1@10.242.238.88:<0.6154.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 696 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.408,ns_1@10.242.238.88:<0.6154.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.408,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.412,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.413,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6189.1>) [ns_server:debug,2014-08-19T16:50:22.413,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 440) [ns_server:debug,2014-08-19T16:50:22.413,ns_1@10.242.238.88:<0.6190.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.413,ns_1@10.242.238.88:<0.6190.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:22.413,ns_1@10.242.238.88:<0.6189.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 440 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.413,ns_1@10.242.238.88:<0.6195.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 440 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.413,ns_1@10.242.238.88:<0.6196.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 440 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.417,ns_1@10.242.238.88:<0.6197.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 440 into 'ns_1@10.242.238.91' is <18126.24849.0> [ns_server:debug,2014-08-19T16:50:22.420,ns_1@10.242.238.88:<0.6197.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 440 into 'ns_1@10.242.238.89' is <18124.27240.0> [rebalance:debug,2014-08-19T16:50:22.420,ns_1@10.242.238.88:<0.6189.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 440 is <0.6197.1> [ns_server:debug,2014-08-19T16:50:22.450,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,441519}, tap_estimate, {replica_building,"default",440,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24849.0>, <<"replication_building_440_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.453,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 555. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.453,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/555. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.453,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",555,active,0} [ns_server:debug,2014-08-19T16:50:22.455,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,620,254,982,854,671,488,360,905,777,722,594, 228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853, 670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174, 902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799, 744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330, 875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717, 589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404, 276,949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974, 846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017, 940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837, 654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496,368, 913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755, 627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158, 886,703,575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783,728, 600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987, 859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390, 262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832, 649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180, 908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750, 128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723, 412,957,646,880,569,258,803,748,126] [ns_server:debug,2014-08-19T16:50:22.465,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,456072}, tap_estimate, {replica_building,"default",440,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27240.0>, <<"replication_building_440_'ns_1@10.242.238.89'">>} 
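The first element of each tap_estimate record is an Erlang-style timestamp tuple, which by the usual {MegaSecs, Secs, MicroSecs}-since-the-Unix-epoch convention can be turned back into wall-clock time; {1408,452622,456072} from the vbucket 440 estimate above is a worked example in the sketch below. The UTC+4 offset mentioned in the comment is an inference from comparing the decoded value with the 16:50:22 local stamps in the record headers, not something stated in the log.

#!/usr/bin/env python3
"""Sketch: decode the {MegaSecs, Secs, MicroSecs} tuples embedded in the
tap_estimate records, assuming the conventional Erlang timestamp layout."""
from datetime import datetime, timedelta, timezone

def erlang_timestamp_to_utc(mega, secs, micro):
    # {MegaSecs, Secs, MicroSecs} counted from the Unix epoch
    return (datetime.fromtimestamp(mega * 1_000_000 + secs, tz=timezone.utc)
            + timedelta(microseconds=micro))

# {1408,452622,456072} -- the vbucket 440 estimate above
print(erlang_timestamp_to_utc(1408, 452622, 456072))
# 2014-08-19 12:50:22.456072+00:00, i.e. 16:50:22 local if the node runs at UTC+4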
[ns_server:debug,2014-08-19T16:50:22.465,ns_1@10.242.238.88:<0.6198.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27240.0>}, {'ns_1@10.242.238.91',<18126.24849.0>}]) [rebalance:info,2014-08-19T16:50:22.465,ns_1@10.242.238.88:<0.6189.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:22.466,ns_1@10.242.238.88:<0.6189.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 440 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.467,ns_1@10.242.238.88:<0.6189.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.467,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.471,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:22.471,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6210.1>) [ns_server:debug,2014-08-19T16:50:22.471,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 951) [ns_server:debug,2014-08-19T16:50:22.471,ns_1@10.242.238.88:<0.6211.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.472,ns_1@10.242.238.88:<0.6211.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:22.472,ns_1@10.242.238.88:<0.6210.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 951 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.472,ns_1@10.242.238.88:<0.6216.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 951 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.472,ns_1@10.242.238.88:<0.6217.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 951 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.476,ns_1@10.242.238.88:<0.6218.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 951 into 'ns_1@10.242.238.90' is <18125.22986.0> [ns_server:debug,2014-08-19T16:50:22.478,ns_1@10.242.238.88:<0.6218.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 951 into 'ns_1@10.242.238.91' is <18126.24854.0> [rebalance:debug,2014-08-19T16:50:22.478,ns_1@10.242.238.88:<0.6210.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 951 is <0.6218.1> [views:debug,2014-08-19T16:50:22.504,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/555. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.504,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",555,active,0} [ns_server:debug,2014-08-19T16:50:22.508,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,499597}, tap_estimate, {replica_building,"default",951,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22986.0>, <<"replication_building_951_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.525,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,516447}, tap_estimate, {replica_building,"default",951,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24854.0>, <<"replication_building_951_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.526,ns_1@10.242.238.88:<0.6219.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24854.0>}, {'ns_1@10.242.238.90',<18125.22986.0>}]) [rebalance:info,2014-08-19T16:50:22.526,ns_1@10.242.238.88:<0.6210.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:22.526,ns_1@10.242.238.88:<0.6210.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 951 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.527,ns_1@10.242.238.88:<0.6210.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.527,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:22.531,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.531,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6231.1>) [ns_server:debug,2014-08-19T16:50:22.532,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 695) [ns_server:debug,2014-08-19T16:50:22.532,ns_1@10.242.238.88:<0.6232.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.532,ns_1@10.242.238.88:<0.6232.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:22.532,ns_1@10.242.238.88:<0.6231.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 695 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.532,ns_1@10.242.238.88:<0.6237.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 695 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.533,ns_1@10.242.238.88:<0.6238.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 695 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.536,ns_1@10.242.238.88:<0.6239.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 695 into 'ns_1@10.242.238.91' is <18126.24866.0> [ns_server:debug,2014-08-19T16:50:22.539,ns_1@10.242.238.88:<0.6239.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 695 into 'ns_1@10.242.238.90' is <18125.22991.0> [rebalance:debug,2014-08-19T16:50:22.539,ns_1@10.242.238.88:<0.6231.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 695 is <0.6239.1> [ns_server:debug,2014-08-19T16:50:22.569,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,560154}, tap_estimate, {replica_building,"default",695,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24866.0>, <<"replication_building_695_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.585,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,576094}, tap_estimate, {replica_building,"default",695,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22991.0>, <<"replication_building_695_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.585,ns_1@10.242.238.88:<0.6240.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.22991.0>}, {'ns_1@10.242.238.91',<18126.24866.0>}]) [rebalance:info,2014-08-19T16:50:22.585,ns_1@10.242.238.88:<0.6231.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:22.586,ns_1@10.242.238.88:<0.6231.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 695 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.587,ns_1@10.242.238.88:<0.6231.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.587,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.591,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.591,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6266.1>) [ns_server:debug,2014-08-19T16:50:22.591,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 439) [ns_server:debug,2014-08-19T16:50:22.592,ns_1@10.242.238.88:<0.6267.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.592,ns_1@10.242.238.88:<0.6267.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:22.592,ns_1@10.242.238.88:<0.6266.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 439 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.592,ns_1@10.242.238.88:<0.6272.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 439 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.592,ns_1@10.242.238.88:<0.6273.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 439 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.598,ns_1@10.242.238.88:<0.6274.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 439 into 'ns_1@10.242.238.91' is <18126.24885.0> [ns_server:debug,2014-08-19T16:50:22.601,ns_1@10.242.238.88:<0.6274.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 439 into 'ns_1@10.242.238.89' is <18124.27260.0> [rebalance:debug,2014-08-19T16:50:22.601,ns_1@10.242.238.88:<0.6266.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 439 is <0.6274.1> [ns_server:debug,2014-08-19T16:50:22.629,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 553. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.629,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/553. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",553,active,0} [ns_server:debug,2014-08-19T16:50:22.631,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,620,254,982,854,671,488,360,905,777,722,594, 228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853, 670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174, 902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799, 744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330, 875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717, 589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404, 276,949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974, 846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965, 837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,496, 368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810, 755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524, 158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911,783, 728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314, 987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573, 390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960, 832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546, 180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805, 750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778, 723,412,957,646,880,569,258,803,748,126] [ns_server:debug,2014-08-19T16:50:22.634,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,625161}, tap_estimate, {replica_building,"default",439,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24885.0>, <<"replication_building_439_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:22.646,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,637815}, tap_estimate, {replica_building,"default",439,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27260.0>, <<"replication_building_439_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:22.648,ns_1@10.242.238.88:<0.6275.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27260.0>}, {'ns_1@10.242.238.91',<18126.24885.0>}]) [rebalance:info,2014-08-19T16:50:22.648,ns_1@10.242.238.88:<0.6266.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:22.648,ns_1@10.242.238.88:<0.6266.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 439 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.649,ns_1@10.242.238.88:<0.6266.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.649,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.653,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:22.653,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6287.1>) [ns_server:debug,2014-08-19T16:50:22.653,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 950) [ns_server:debug,2014-08-19T16:50:22.654,ns_1@10.242.238.88:<0.6288.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.654,ns_1@10.242.238.88:<0.6288.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:22.654,ns_1@10.242.238.88:<0.6287.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 950 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.654,ns_1@10.242.238.88:<0.6293.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 950 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.654,ns_1@10.242.238.88:<0.6294.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 950 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.658,ns_1@10.242.238.88:<0.6295.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 950 into 'ns_1@10.242.238.90' is <18125.22997.0> [ns_server:debug,2014-08-19T16:50:22.661,ns_1@10.242.238.88:<0.6295.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 950 into 'ns_1@10.242.238.91' is <18126.24890.0> [rebalance:debug,2014-08-19T16:50:22.661,ns_1@10.242.238.88:<0.6287.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 950 is <0.6295.1> [ns_server:debug,2014-08-19T16:50:22.692,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,683270}, tap_estimate, {replica_building,"default",950,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.22997.0>, <<"replication_building_950_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.704,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,695952}, tap_estimate, {replica_building,"default",950,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24890.0>, <<"replication_building_950_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.705,ns_1@10.242.238.88:<0.6296.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24890.0>}, {'ns_1@10.242.238.90',<18125.22997.0>}]) [rebalance:info,2014-08-19T16:50:22.705,ns_1@10.242.238.88:<0.6287.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:22.706,ns_1@10.242.238.88:<0.6287.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 950 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.706,ns_1@10.242.238.88:<0.6287.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.707,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:22.711,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.711,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6308.1>) [ns_server:debug,2014-08-19T16:50:22.711,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 694) [ns_server:debug,2014-08-19T16:50:22.711,ns_1@10.242.238.88:<0.6309.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.711,ns_1@10.242.238.88:<0.6309.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:22.712,ns_1@10.242.238.88:<0.6308.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 694 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.712,ns_1@10.242.238.88:<0.6314.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 694 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.712,ns_1@10.242.238.88:<0.6315.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 694 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:50:22.713,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/553. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.713,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",553,active,0} [ns_server:debug,2014-08-19T16:50:22.717,ns_1@10.242.238.88:<0.6316.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 694 into 'ns_1@10.242.238.91' is <18126.24896.0> [ns_server:debug,2014-08-19T16:50:22.720,ns_1@10.242.238.88:<0.6316.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 694 into 'ns_1@10.242.238.90' is <18125.23016.0> [rebalance:debug,2014-08-19T16:50:22.720,ns_1@10.242.238.88:<0.6308.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 694 is <0.6316.1> [ns_server:debug,2014-08-19T16:50:22.749,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,740356}, tap_estimate, {replica_building,"default",694,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24896.0>, <<"replication_building_694_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.765,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,756850}, tap_estimate, {replica_building,"default",694,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23016.0>, <<"replication_building_694_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.766,ns_1@10.242.238.88:<0.6317.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23016.0>}, {'ns_1@10.242.238.91',<18126.24896.0>}]) [rebalance:info,2014-08-19T16:50:22.766,ns_1@10.242.238.88:<0.6308.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:22.767,ns_1@10.242.238.88:<0.6308.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 694 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.767,ns_1@10.242.238.88:<0.6308.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:22.768,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.771,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.771,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6329.1>) [ns_server:debug,2014-08-19T16:50:22.772,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 438) [ns_server:debug,2014-08-19T16:50:22.772,ns_1@10.242.238.88:<0.6330.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.772,ns_1@10.242.238.88:<0.6330.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:22.772,ns_1@10.242.238.88:<0.6329.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 438 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.772,ns_1@10.242.238.88:<0.6336.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 438 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [rebalance:info,2014-08-19T16:50:22.772,ns_1@10.242.238.88:<0.6335.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 438 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:50:22.777,ns_1@10.242.238.88:<0.6342.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 438 into 'ns_1@10.242.238.91' is <18126.24915.0> [ns_server:debug,2014-08-19T16:50:22.780,ns_1@10.242.238.88:<0.6342.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 438 into 'ns_1@10.242.238.89' is <18124.27280.0> [rebalance:debug,2014-08-19T16:50:22.780,ns_1@10.242.238.88:<0.6329.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 438 is <0.6342.1> [ns_server:debug,2014-08-19T16:50:22.809,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,800679}, tap_estimate, {replica_building,"default",438,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24915.0>, <<"replication_building_438_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.814,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 551. Nacking mccouch update. [views:debug,2014-08-19T16:50:22.814,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/551. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.814,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",551,active,0} [ns_server:debug,2014-08-19T16:50:22.815,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,620,254,982,854,671,488,360,905,777,722,594, 228,956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853, 670,542,176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826, 643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174, 902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799, 744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330, 875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717, 589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404, 276,949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974, 846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965, 837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551, 496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938, 810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652, 524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,494,366,911, 783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442, 314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701, 573,390,262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232, 960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674, 546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010, 805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178, 778,723,412,957,646,880,569,258,803,748,126] [ns_server:debug,2014-08-19T16:50:22.827,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,818539}, tap_estimate, {replica_building,"default",438,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27280.0>, <<"replication_building_438_'ns_1@10.242.238.89'">>} 
[ns_server:debug,2014-08-19T16:50:22.840,ns_1@10.242.238.88:<0.6346.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27280.0>}, {'ns_1@10.242.238.91',<18126.24915.0>}]) [rebalance:info,2014-08-19T16:50:22.841,ns_1@10.242.238.88:<0.6329.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:22.842,ns_1@10.242.238.88:<0.6329.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 438 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.842,ns_1@10.242.238.88:<0.6329.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.843,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.846,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:22.847,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6364.1>) [ns_server:debug,2014-08-19T16:50:22.847,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 949) [ns_server:debug,2014-08-19T16:50:22.847,ns_1@10.242.238.88:<0.6365.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.847,ns_1@10.242.238.88:<0.6365.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:22.847,ns_1@10.242.238.88:<0.6364.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 949 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.847,ns_1@10.242.238.88:<0.6370.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 949 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.848,ns_1@10.242.238.88:<0.6371.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 949 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.851,ns_1@10.242.238.88:<0.6372.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 949 into 'ns_1@10.242.238.90' is <18125.23023.0> [ns_server:debug,2014-08-19T16:50:22.854,ns_1@10.242.238.88:<0.6372.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 949 into 'ns_1@10.242.238.91' is <18126.24920.0> [rebalance:debug,2014-08-19T16:50:22.854,ns_1@10.242.238.88:<0.6364.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 949 is <0.6372.1> [views:debug,2014-08-19T16:50:22.872,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/551. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:22.873,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",551,active,0} [ns_server:debug,2014-08-19T16:50:22.884,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,875942}, tap_estimate, {replica_building,"default",949,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23023.0>, <<"replication_building_949_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.899,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,890364}, tap_estimate, {replica_building,"default",949,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24920.0>, <<"replication_building_949_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.899,ns_1@10.242.238.88:<0.6373.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24920.0>}, {'ns_1@10.242.238.90',<18125.23023.0>}]) [rebalance:info,2014-08-19T16:50:22.900,ns_1@10.242.238.88:<0.6364.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:22.900,ns_1@10.242.238.88:<0.6364.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 949 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:22.901,ns_1@10.242.238.88:<0.6364.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.902,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:22.905,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:22.905,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6385.1>) [ns_server:debug,2014-08-19T16:50:22.906,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 693) [ns_server:debug,2014-08-19T16:50:22.906,ns_1@10.242.238.88:<0.6386.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.906,ns_1@10.242.238.88:<0.6386.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:22.906,ns_1@10.242.238.88:<0.6385.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 693 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.906,ns_1@10.242.238.88:<0.6391.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 693 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.907,ns_1@10.242.238.88:<0.6392.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 693 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.910,ns_1@10.242.238.88:<0.6393.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 693 into 'ns_1@10.242.238.91' is <18126.24926.0> [ns_server:debug,2014-08-19T16:50:22.913,ns_1@10.242.238.88:<0.6393.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 693 into 'ns_1@10.242.238.90' is <18125.23036.0> [rebalance:debug,2014-08-19T16:50:22.913,ns_1@10.242.238.88:<0.6385.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 693 is <0.6393.1> [ns_server:debug,2014-08-19T16:50:22.943,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,934532}, tap_estimate, {replica_building,"default",693,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24926.0>, <<"replication_building_693_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:22.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_693_'ns_1@10.242.238.91'">>}]}, {move_state,949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_949_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_949_'ns_1@10.242.238.90'">>}]}, {move_state,438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_438_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_438_'ns_1@10.242.238.91'">>}]}, {move_state,694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_694_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_694_'ns_1@10.242.238.91'">>}]}, {move_state,950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_950_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_950_'ns_1@10.242.238.90'">>}]}, {move_state,439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_439_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_439_'ns_1@10.242.238.91'">>}]}, 
{move_state,695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_695_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_695_'ns_1@10.242.238.91'">>}]}, {move_state,951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_951_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_951_'ns_1@10.242.238.90'">>}]}, {move_state,440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_440_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_440_'ns_1@10.242.238.91'">>}]}, {move_state,696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_696_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_696_'ns_1@10.242.238.91'">>}]}, {move_state,952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_952_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_952_'ns_1@10.242.238.90'">>}]}, {move_state,441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_441_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_441_'ns_1@10.242.238.91'">>}]}, {move_state,697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_697_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_697_'ns_1@10.242.238.91'">>}]}, {move_state,953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_953_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_953_'ns_1@10.242.238.90'">>}]}, {move_state,442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_442_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_442_'ns_1@10.242.238.91'">>}]}, {move_state,698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_698_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_698_'ns_1@10.242.238.91'">>}]}, {move_state,954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_954_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_954_'ns_1@10.242.238.90'">>}]}, {move_state,443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_443_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_443_'ns_1@10.242.238.91'">>}]}, {move_state,699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_699_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_699_'ns_1@10.242.238.91'">>}]}, {move_state,955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_955_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_955_'ns_1@10.242.238.90'">>}]}, {move_state,444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_444_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_444_'ns_1@10.242.238.91'">>}]}, {move_state,700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_700_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_700_'ns_1@10.242.238.91'">>}]}, {move_state,956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_956_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_956_'ns_1@10.242.238.90'">>}]}, {move_state,445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_445_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_445_'ns_1@10.242.238.91'">>}]}, {move_state,701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_701_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_701_'ns_1@10.242.238.91'">>}]}, {move_state,957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_957_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_957_'ns_1@10.242.238.90'">>}]}, {move_state,446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_446_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_446_'ns_1@10.242.238.91'">>}]}, {move_state,702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_702_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_702_'ns_1@10.242.238.91'">>}]}, {move_state,958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_958_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_958_'ns_1@10.242.238.90'">>}]}, {move_state,447, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_447_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_447_'ns_1@10.242.238.91'">>}]}, {move_state,959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_959_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_959_'ns_1@10.242.238.90'">>}]}, {move_state,703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_703_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_703_'ns_1@10.242.238.91'">>}]}] [ns_server:debug,2014-08-19T16:50:22.952,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 693, [{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.953,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 949, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 438, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 694, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 950, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 439, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 695, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 951, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 440, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,949092}, tap_estimate, {replica_building,"default",693,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23036.0>, <<"replication_building_693_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:22.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 696, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.958,ns_1@10.242.238.88:<0.6394.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23036.0>}, {'ns_1@10.242.238.91',<18126.24926.0>}]) [rebalance:info,2014-08-19T16:50:22.958,ns_1@10.242.238.88:<0.6385.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 
[ns_server:debug,2014-08-19T16:50:22.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 952, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 441, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [rebalance:info,2014-08-19T16:50:22.959,ns_1@10.242.238.88:<0.6385.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 693 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:22.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 697, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [rebalance:info,2014-08-19T16:50:22.960,ns_1@10.242.238.88:<0.6385.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:22.960,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:22.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 953, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 442, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 698, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 954, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 443, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 699, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 955, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 444, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.965,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [ns_server:debug,2014-08-19T16:50:22.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 700, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [rebalance:debug,2014-08-19T16:50:22.965,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6443.1>) [ns_server:debug,2014-08-19T16:50:22.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 437) 
[ns_server:debug,2014-08-19T16:50:22.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 956, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.966,ns_1@10.242.238.88:<0.6444.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:22.966,ns_1@10.242.238.88:<0.6444.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:22.966,ns_1@10.242.238.88:<0.6443.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 437 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:22.966,ns_1@10.242.238.88:<0.6450.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 437 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:22.966,ns_1@10.242.238.88:<0.6451.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 437 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:22.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 445, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 701, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 957, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 446, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 702, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 958, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 447, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 959, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:22.971,ns_1@10.242.238.88:<0.6456.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 437 into 'ns_1@10.242.238.91' is <18126.24931.0> [ns_server:debug,2014-08-19T16:50:22.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 703, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:22.973,ns_1@10.242.238.88:<0.6456.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 437 into 'ns_1@10.242.238.89' is <18124.27314.0> [rebalance:debug,2014-08-19T16:50:22.973,ns_1@10.242.238.88:<0.6443.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 437 is <0.6456.1> 
[ns_server:debug,2014-08-19T16:50:23.004,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452622,995737}, tap_estimate, {replica_building,"default",437,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24931.0>, <<"replication_building_437_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.019,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,10299}, tap_estimate, {replica_building,"default",437,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27314.0>, <<"replication_building_437_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:23.019,ns_1@10.242.238.88:<0.6461.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27314.0>}, {'ns_1@10.242.238.91',<18126.24931.0>}]) [rebalance:info,2014-08-19T16:50:23.019,ns_1@10.242.238.88:<0.6443.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:23.020,ns_1@10.242.238.88:<0.6443.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 437 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.020,ns_1@10.242.238.88:<0.6443.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.021,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.025,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:23.025,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6473.1>) [ns_server:debug,2014-08-19T16:50:23.025,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 948) [ns_server:debug,2014-08-19T16:50:23.025,ns_1@10.242.238.88:<0.6474.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.025,ns_1@10.242.238.88:<0.6474.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:23.026,ns_1@10.242.238.88:<0.6473.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 948 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.026,ns_1@10.242.238.88:<0.6479.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 948 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.026,ns_1@10.242.238.88:<0.6480.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 948 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.030,ns_1@10.242.238.88:<0.6481.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 948 into 'ns_1@10.242.238.90' is <18125.23048.0> [ns_server:debug,2014-08-19T16:50:23.032,ns_1@10.242.238.88:<0.6481.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 948 into 'ns_1@10.242.238.91' is <18126.24950.0> [rebalance:debug,2014-08-19T16:50:23.032,ns_1@10.242.238.88:<0.6473.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 948 is <0.6481.1> [ns_server:debug,2014-08-19T16:50:23.048,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 549. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.048,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/549. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.048,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",549,active,0} [ns_server:debug,2014-08-19T16:50:23.050,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,620,254,854,488,905,777,722,594,228,956,828, 645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176, 904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801, 746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332, 877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719, 591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250, 122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564, 198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278, 951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848, 665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,480, 352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922,794, 739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636, 142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737,609, 426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424,296, 
969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866,683, 555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839, 656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,553,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551,496,368,913, 785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627, 444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886, 703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366,911,783,728, 600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987, 859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390, 262,1012,935,807,752,624,130,986,858,675,492,364,909,781,726,598,232,960,832, 649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,180, 908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750, 128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778,723, 412,957,646,880,569,258,803,748,126,982,671,360] [ns_server:debug,2014-08-19T16:50:23.065,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,56839}, tap_estimate, {replica_building,"default",948,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23048.0>, <<"replication_building_948_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.081,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,72215}, tap_estimate, {replica_building,"default",948,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24950.0>, <<"replication_building_948_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.081,ns_1@10.242.238.88:<0.6482.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24950.0>}, {'ns_1@10.242.238.90',<18125.23048.0>}]) [rebalance:info,2014-08-19T16:50:23.081,ns_1@10.242.238.88:<0.6473.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:23.082,ns_1@10.242.238.88:<0.6473.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 948 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.082,ns_1@10.242.238.88:<0.6473.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.083,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:23.086,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.087,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6494.1>) 
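The entries above repeat one per-vbucket move pattern, driven by ns_vbucket_mover <0.25746.0> on ns_1@10.242.238.88 for bucket "default": an action {move,{VBucket,[OldActive,undefined],[NewActive,NewReplica]}} is picked up, a single-vbucket mover is spawned, the destination nodes are put into replica/passive state, one replica-building ebucketmigrator is started per destination, the observer records a tap_estimate of 0 for each builder, backfill is confirmed ("Had backfill rvs: [true,true]"), indexing is initiated on the new active node, the mover waits for checkpoint 1 on the replicas, and on_backfill_done lets the next action be scheduled. The sketch below is only a reading aid for that ordering, written in Python; the actual orchestration is the Erlang code referenced in the log lines, and the phase names are simply lifted from the messages above.

# Illustrative model only -- not ns_server code. Phase names are lifted from
# the log messages; the ordering is the one observed for every vbucket above.
from dataclasses import dataclass, field
from typing import List, Optional

@dataclass
class VBucketMove:
    vbucket: int
    old_chain: List[Optional[str]]   # e.g. ['ns_1@10.242.238.88', None]
    new_chain: List[Optional[str]]   # e.g. ['ns_1@10.242.238.91', 'ns_1@10.242.238.90']
    phases: List[str] = field(default_factory=list)

MOVE_PHASES = [
    "spawn_mover",                  # "Spawned single vbucket mover"
    "bulk_set_vbucket_state",       # destinations set to replica/passive
    "spawn_replica_builders",       # one ebucketmigrator per destination node
    "tap_estimate",                 # observer sees estimate 0 per builder
    "wait_backfill_determination",  # "Had backfill rvs: [true,true]"
    "initiate_indexing",            # on the new active node
    "wait_checkpoint_on_replicas",  # "Will wait for checkpoint 1 on replicas"
    "backfill_done",                # "noted backfill done", next action scheduled
]

def advance(move: VBucketMove, phase: str) -> None:
    """Record the next phase, checking it matches the observed ordering."""
    expected = MOVE_PHASES[len(move.phases)]
    if phase != expected:
        raise ValueError(f"vb {move.vbucket}: got {phase!r}, expected {expected!r}")
    move.phases.append(phase)

# Replaying the phases for vbucket 948 as they appear above:
move_948 = VBucketMove(948, ['ns_1@10.242.238.88', None],
                       ['ns_1@10.242.238.91', 'ns_1@10.242.238.90'])
for phase in MOVE_PHASES:
    advance(move_948, phase)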
[ns_server:debug,2014-08-19T16:50:23.087,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 692) [ns_server:debug,2014-08-19T16:50:23.087,ns_1@10.242.238.88:<0.6495.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.087,ns_1@10.242.238.88:<0.6495.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:23.087,ns_1@10.242.238.88:<0.6494.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 692 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.088,ns_1@10.242.238.88:<0.6500.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 692 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.088,ns_1@10.242.238.88:<0.6501.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 692 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.091,ns_1@10.242.238.88:<0.6502.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 692 into 'ns_1@10.242.238.91' is <18126.24956.0> [ns_server:debug,2014-08-19T16:50:23.094,ns_1@10.242.238.88:<0.6502.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 692 into 'ns_1@10.242.238.90' is <18125.23053.0> [rebalance:debug,2014-08-19T16:50:23.094,ns_1@10.242.238.88:<0.6494.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 692 is <0.6502.1> [views:debug,2014-08-19T16:50:23.106,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/549. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.107,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",549,active,0} [ns_server:debug,2014-08-19T16:50:23.125,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,116804}, tap_estimate, {replica_building,"default",692,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24956.0>, <<"replication_building_692_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.141,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,132287}, tap_estimate, {replica_building,"default",692,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23053.0>, <<"replication_building_692_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.142,ns_1@10.242.238.88:<0.6503.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23053.0>}, {'ns_1@10.242.238.91',<18126.24956.0>}]) [rebalance:info,2014-08-19T16:50:23.142,ns_1@10.242.238.88:<0.6494.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:23.142,ns_1@10.242.238.88:<0.6494.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 692 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.143,ns_1@10.242.238.88:<0.6494.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.143,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.147,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.147,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6515.1>) [ns_server:debug,2014-08-19T16:50:23.148,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 436) [ns_server:debug,2014-08-19T16:50:23.148,ns_1@10.242.238.88:<0.6516.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.148,ns_1@10.242.238.88:<0.6516.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:23.148,ns_1@10.242.238.88:<0.6515.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 436 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.148,ns_1@10.242.238.88:<0.6521.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 436 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.148,ns_1@10.242.238.88:<0.6522.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 436 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.152,ns_1@10.242.238.88:<0.6523.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 436 into 'ns_1@10.242.238.91' is <18126.24961.0> [ns_server:debug,2014-08-19T16:50:23.155,ns_1@10.242.238.88:<0.6523.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 436 into 'ns_1@10.242.238.89' is <18124.27334.0> [rebalance:debug,2014-08-19T16:50:23.155,ns_1@10.242.238.88:<0.6515.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 436 is <0.6523.1> [ns_server:debug,2014-08-19T16:50:23.200,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,191684}, tap_estimate, {replica_building,"default",436,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24961.0>, <<"replication_building_436_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.216,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,207250}, tap_estimate, {replica_building,"default",436,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27334.0>, <<"replication_building_436_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:23.216,ns_1@10.242.238.88:<0.6524.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27334.0>}, {'ns_1@10.242.238.91',<18126.24961.0>}]) [rebalance:info,2014-08-19T16:50:23.217,ns_1@10.242.238.88:<0.6515.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:23.217,ns_1@10.242.238.88:<0.6515.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 436 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.218,ns_1@10.242.238.88:<0.6515.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.218,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.222,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:23.222,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6555.1>) [ns_server:debug,2014-08-19T16:50:23.222,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 947) [ns_server:debug,2014-08-19T16:50:23.223,ns_1@10.242.238.88:<0.6556.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.223,ns_1@10.242.238.88:<0.6556.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:23.223,ns_1@10.242.238.88:<0.6555.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 947 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.223,ns_1@10.242.238.88:<0.6561.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 947 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.223,ns_1@10.242.238.88:<0.6562.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 947 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.228,ns_1@10.242.238.88:<0.6563.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 947 into 'ns_1@10.242.238.90' is <18125.23073.0> [ns_server:debug,2014-08-19T16:50:23.230,ns_1@10.242.238.88:<0.6563.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 947 into 'ns_1@10.242.238.91' is <18126.24980.0> [rebalance:debug,2014-08-19T16:50:23.230,ns_1@10.242.238.88:<0.6555.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 947 is <0.6563.1> [ns_server:debug,2014-08-19T16:50:23.261,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,252423}, tap_estimate, {replica_building,"default",947,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23073.0>, <<"replication_building_947_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.277,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,268378}, tap_estimate, {replica_building,"default",947,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24980.0>, <<"replication_building_947_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.277,ns_1@10.242.238.88:<0.6564.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.24980.0>}, {'ns_1@10.242.238.90',<18125.23073.0>}]) [rebalance:info,2014-08-19T16:50:23.278,ns_1@10.242.238.88:<0.6555.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:23.278,ns_1@10.242.238.88:<0.6555.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 947 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.279,ns_1@10.242.238.88:<0.6555.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.279,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:23.282,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 547. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:23.282,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/547. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.282,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",547,active,0} [ns_server:debug,2014-08-19T16:50:23.283,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.283,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6576.1>) [ns_server:debug,2014-08-19T16:50:23.284,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 691) [ns_server:debug,2014-08-19T16:50:23.284,ns_1@10.242.238.88:<0.6577.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.284,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906,595, 284,829,518,152,697,386,1008,931,620,254,854,488,905,777,722,594,228,956,828, 645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176, 904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801, 746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460,332, 877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719, 591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250, 122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564, 198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278, 951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848, 665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,480, 352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922,794, 739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636, 142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737,609, 426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424,296, 969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866,683, 555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214,1019, 942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967,839, 656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,553,498, 370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940,812, 757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654,526, 160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551,496,368,913, 
785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755,627, 444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158,886, 703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366,911,783,728, 600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314,987, 859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573,390, 262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726,598,232,960, 832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546, 180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805, 750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178,778, 723,412,957,646,880,569,258,803,748,126,982,671,360] [ns_server:debug,2014-08-19T16:50:23.284,ns_1@10.242.238.88:<0.6577.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:23.284,ns_1@10.242.238.88:<0.6576.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 691 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.284,ns_1@10.242.238.88:<0.6582.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 691 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.285,ns_1@10.242.238.88:<0.6583.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 691 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.288,ns_1@10.242.238.88:<0.6584.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 691 into 'ns_1@10.242.238.91' is <18126.24986.0> [ns_server:debug,2014-08-19T16:50:23.291,ns_1@10.242.238.88:<0.6584.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 691 into 'ns_1@10.242.238.90' is <18125.23092.0> [rebalance:debug,2014-08-19T16:50:23.291,ns_1@10.242.238.88:<0.6576.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 691 is <0.6584.1> [views:debug,2014-08-19T16:50:23.316,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/547. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.316,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",547,active,0} [ns_server:debug,2014-08-19T16:50:23.322,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,313043}, tap_estimate, {replica_building,"default",691,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24986.0>, <<"replication_building_691_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.336,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,327852}, tap_estimate, {replica_building,"default",691,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23092.0>, <<"replication_building_691_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.337,ns_1@10.242.238.88:<0.6585.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23092.0>}, {'ns_1@10.242.238.91',<18126.24986.0>}]) [rebalance:info,2014-08-19T16:50:23.337,ns_1@10.242.238.88:<0.6576.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:23.338,ns_1@10.242.238.88:<0.6576.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 691 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.338,ns_1@10.242.238.88:<0.6576.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.339,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.342,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.342,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6597.1>) [ns_server:debug,2014-08-19T16:50:23.343,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 435) [ns_server:debug,2014-08-19T16:50:23.343,ns_1@10.242.238.88:<0.6598.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.343,ns_1@10.242.238.88:<0.6598.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:23.343,ns_1@10.242.238.88:<0.6597.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 435 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.344,ns_1@10.242.238.88:<0.6603.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 435 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.344,ns_1@10.242.238.88:<0.6604.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 435 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.348,ns_1@10.242.238.88:<0.6605.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 435 into 'ns_1@10.242.238.91' is <18126.24991.0> [ns_server:debug,2014-08-19T16:50:23.350,ns_1@10.242.238.88:<0.6605.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 435 into 'ns_1@10.242.238.89' is <18124.27343.0> [rebalance:debug,2014-08-19T16:50:23.350,ns_1@10.242.238.88:<0.6597.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 435 is <0.6605.1> [ns_server:debug,2014-08-19T16:50:23.381,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,372497}, tap_estimate, {replica_building,"default",435,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.24991.0>, <<"replication_building_435_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.398,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,389290}, tap_estimate, {replica_building,"default",435,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27343.0>, <<"replication_building_435_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:23.398,ns_1@10.242.238.88:<0.6606.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27343.0>}, {'ns_1@10.242.238.91',<18126.24991.0>}]) [rebalance:info,2014-08-19T16:50:23.399,ns_1@10.242.238.88:<0.6597.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:23.399,ns_1@10.242.238.88:<0.6597.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 435 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.400,ns_1@10.242.238.88:<0.6597.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.400,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.404,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:23.404,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6632.1>) [ns_server:debug,2014-08-19T16:50:23.404,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 946) [ns_server:debug,2014-08-19T16:50:23.405,ns_1@10.242.238.88:<0.6633.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.405,ns_1@10.242.238.88:<0.6633.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:23.405,ns_1@10.242.238.88:<0.6632.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 946 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.405,ns_1@10.242.238.88:<0.6638.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 946 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.405,ns_1@10.242.238.88:<0.6639.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 946 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.407,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 545. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.407,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/545. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.408,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",545,active,0} [ns_server:debug,2014-08-19T16:50:23.409,ns_1@10.242.238.88:<0.6640.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 946 into 'ns_1@10.242.238.90' is <18125.23098.0> [ns_server:debug,2014-08-19T16:50:23.409,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,488,905,777,722,594,228,956, 828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542, 176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929, 801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643,460, 332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774, 719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616, 250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330,875,692, 564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406, 278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976, 848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196, 1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949, 821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,478,350, 895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424, 
296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,553, 498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940, 812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654, 526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551,496,368, 913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755, 627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158, 886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366,911,783, 728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314, 987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573, 390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726,598,232, 960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674, 546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010, 805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178, 778,723,412,957,646,880,569,258,803,748,126,982,671,360] [ns_server:debug,2014-08-19T16:50:23.412,ns_1@10.242.238.88:<0.6640.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 946 into 'ns_1@10.242.238.91' is <18126.25010.0> [rebalance:debug,2014-08-19T16:50:23.412,ns_1@10.242.238.88:<0.6632.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 946 is <0.6640.1> [ns_server:debug,2014-08-19T16:50:23.444,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,435419}, tap_estimate, {replica_building,"default",946,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23098.0>, <<"replication_building_946_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.459,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,450469}, tap_estimate, {replica_building,"default",946,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25010.0>, <<"replication_building_946_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.459,ns_1@10.242.238.88:<0.6641.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25010.0>}, {'ns_1@10.242.238.90',<18125.23098.0>}]) [rebalance:info,2014-08-19T16:50:23.460,ns_1@10.242.238.88:<0.6632.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:23.460,ns_1@10.242.238.88:<0.6632.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 946 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.460,ns_1@10.242.238.88:<0.6632.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.461,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:23.465,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
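Interleaved with the moves, mc_connection keeps reporting that a _local/vbuuid document was added to a vbucket and that the mccouch update was nacked; capi_set_view_manager then sees the matching set_vbucket event marking that vbucket active, and its "Usable vbuckets" list grows by that id (549, 547, 545, ... in this excerpt). A small Python sketch for pulling those events out of raw log text follows; it assumes exactly the "Got set_vbucket event for <bucket>/<vb>. Updated state: <state>" wording shown here and may not match other ns_server versions.

# Sketch only: extract set_vbucket events from raw ns_server log text,
# assuming the exact wording visible in this excerpt.
import re

SET_VBUCKET_RE = re.compile(
    r"\[views:debug,(?P<ts>[^,]+),[^\]]+\]"
    r"Got set_vbucket event for (?P<bucket>[^/]+)/(?P<vb>\d+)\.\s+"
    r"Updated state:\s+(?P<state>\w+)"
)

def set_vbucket_events(log_text):
    """Yield (timestamp, bucket, vbucket, state) for each event found."""
    for m in SET_VBUCKET_RE.finditer(log_text):
        yield m.group("ts"), m.group("bucket"), int(m.group("vb")), m.group("state")

# For the text above this yields entries such as:
#   ('2014-08-19T16:50:23.048', 'default', 549, 'active')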
[rebalance:debug,2014-08-19T16:50:23.465,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6654.1>) [ns_server:debug,2014-08-19T16:50:23.465,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 690) [ns_server:debug,2014-08-19T16:50:23.466,ns_1@10.242.238.88:<0.6655.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.466,ns_1@10.242.238.88:<0.6655.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:23.466,ns_1@10.242.238.88:<0.6654.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 690 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.466,ns_1@10.242.238.88:<0.6660.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 690 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.466,ns_1@10.242.238.88:<0.6661.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 690 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.470,ns_1@10.242.238.88:<0.6662.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 690 into 'ns_1@10.242.238.91' is <18126.25030.0> [ns_server:debug,2014-08-19T16:50:23.472,ns_1@10.242.238.88:<0.6662.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 690 into 'ns_1@10.242.238.90' is <18125.23131.0> [rebalance:debug,2014-08-19T16:50:23.472,ns_1@10.242.238.88:<0.6654.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 690 is <0.6662.1> [views:debug,2014-08-19T16:50:23.473,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/545. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",545,active,0} [ns_server:debug,2014-08-19T16:50:23.508,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,499427}, tap_estimate, {replica_building,"default",690,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25030.0>, <<"replication_building_690_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.525,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,516423}, tap_estimate, {replica_building,"default",690,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23131.0>, <<"replication_building_690_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.525,ns_1@10.242.238.88:<0.6663.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23131.0>}, {'ns_1@10.242.238.91',<18126.25030.0>}]) [rebalance:info,2014-08-19T16:50:23.525,ns_1@10.242.238.88:<0.6654.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:23.526,ns_1@10.242.238.88:<0.6654.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 690 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.526,ns_1@10.242.238.88:<0.6654.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.532,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.535,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{434, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.535,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",434, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6689.1>) [ns_server:debug,2014-08-19T16:50:23.536,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 434) [ns_server:debug,2014-08-19T16:50:23.536,ns_1@10.242.238.88:<0.6690.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.536,ns_1@10.242.238.88:<0.6690.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:23.536,ns_1@10.242.238.88:<0.6689.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 434 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.536,ns_1@10.242.238.88:<0.6695.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 434 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.537,ns_1@10.242.238.88:<0.6696.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 434 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.540,ns_1@10.242.238.88:<0.6697.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 434 into 'ns_1@10.242.238.91' is <18126.25035.0> [ns_server:debug,2014-08-19T16:50:23.543,ns_1@10.242.238.88:<0.6697.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 434 into 'ns_1@10.242.238.89' is <18124.27360.0> [rebalance:debug,2014-08-19T16:50:23.543,ns_1@10.242.238.88:<0.6689.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 434 is <0.6697.1> [ns_server:debug,2014-08-19T16:50:23.554,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 543. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.555,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/543. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.555,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",543,active,0} [ns_server:debug,2014-08-19T16:50:23.556,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,905,777,722,594,228, 956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670, 542,176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006, 929,801,746,618,252,124,980,852,669,486,358,903,775,720,592,226,954,826,643, 460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902, 774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744, 616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330,875, 692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589, 406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120, 976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562, 196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276, 949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846, 663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194, 922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819, 764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 
424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017, 940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837, 654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551,496, 368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810, 755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524, 158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366,911, 783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442, 314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701, 573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726,598, 232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857, 674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260, 1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855, 544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360] [ns_server:debug,2014-08-19T16:50:23.574,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,565466}, tap_estimate, {replica_building,"default",434,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25035.0>, <<"replication_building_434_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:50:23.588,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/543. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",543,active,0} [ns_server:debug,2014-08-19T16:50:23.590,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,581098}, tap_estimate, {replica_building,"default",434,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27360.0>, <<"replication_building_434_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:23.590,ns_1@10.242.238.88:<0.6698.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27360.0>}, {'ns_1@10.242.238.91',<18126.25035.0>}]) [rebalance:info,2014-08-19T16:50:23.590,ns_1@10.242.238.88:<0.6689.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:23.591,ns_1@10.242.238.88:<0.6689.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 434 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.592,ns_1@10.242.238.88:<0.6689.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.592,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{434, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.596,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:23.596,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6710.1>) [ns_server:debug,2014-08-19T16:50:23.596,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 945) [ns_server:debug,2014-08-19T16:50:23.596,ns_1@10.242.238.88:<0.6711.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.596,ns_1@10.242.238.88:<0.6711.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:23.597,ns_1@10.242.238.88:<0.6710.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 945 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.597,ns_1@10.242.238.88:<0.6716.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 945 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.597,ns_1@10.242.238.88:<0.6717.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 945 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.601,ns_1@10.242.238.88:<0.6718.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 945 into 'ns_1@10.242.238.90' is <18125.23153.0> [ns_server:debug,2014-08-19T16:50:23.604,ns_1@10.242.238.88:<0.6718.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 945 into 'ns_1@10.242.238.91' is <18126.25055.0> [rebalance:debug,2014-08-19T16:50:23.604,ns_1@10.242.238.88:<0.6710.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 945 is <0.6718.1> [ns_server:debug,2014-08-19T16:50:23.635,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,626274}, tap_estimate, {replica_building,"default",945,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23153.0>, <<"replication_building_945_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.650,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,641220}, tap_estimate, {replica_building,"default",945,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25055.0>, <<"replication_building_945_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.650,ns_1@10.242.238.88:<0.6719.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25055.0>}, {'ns_1@10.242.238.90',<18125.23153.0>}]) [rebalance:info,2014-08-19T16:50:23.650,ns_1@10.242.238.88:<0.6710.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:23.651,ns_1@10.242.238.88:<0.6710.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 945 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.651,ns_1@10.242.238.88:<0.6710.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.652,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:23.656,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.656,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6745.1>) [ns_server:debug,2014-08-19T16:50:23.656,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 689) [ns_server:debug,2014-08-19T16:50:23.656,ns_1@10.242.238.88:<0.6746.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.656,ns_1@10.242.238.88:<0.6746.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:23.656,ns_1@10.242.238.88:<0.6745.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 689 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.657,ns_1@10.242.238.88:<0.6751.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 689 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.657,ns_1@10.242.238.88:<0.6752.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 689 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.660,ns_1@10.242.238.88:<0.6753.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 689 into 'ns_1@10.242.238.91' is <18126.25061.0> [ns_server:debug,2014-08-19T16:50:23.663,ns_1@10.242.238.88:<0.6753.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 689 into 'ns_1@10.242.238.90' is <18125.23166.0> [rebalance:debug,2014-08-19T16:50:23.663,ns_1@10.242.238.88:<0.6745.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 689 is <0.6753.1> [ns_server:debug,2014-08-19T16:50:23.664,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 541. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.664,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/541. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.664,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",541,active,0} [ns_server:debug,2014-08-19T16:50:23.665,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,905,777,722,594,228, 956,828,645,462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670, 542,176,904,776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006, 929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826, 643,460,332,877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174, 902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799, 744,616,250,122,978,850,667,484,356,901,773,718,590,224,952,824,641,458,330, 875,692,564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717, 589,406,278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404, 276,949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974, 846,663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965, 837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551, 496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938, 810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652, 524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366, 911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625, 442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884, 701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726, 598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985, 857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388, 260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310, 855,544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360] [ns_server:debug,2014-08-19T16:50:23.694,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,685326}, tap_estimate, {replica_building,"default",689,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25061.0>, <<"replication_building_689_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:50:23.708,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,699785}, tap_estimate, {replica_building,"default",689,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23166.0>, <<"replication_building_689_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.709,ns_1@10.242.238.88:<0.6754.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23166.0>}, {'ns_1@10.242.238.91',<18126.25061.0>}]) [rebalance:info,2014-08-19T16:50:23.709,ns_1@10.242.238.88:<0.6745.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:23.709,ns_1@10.242.238.88:<0.6745.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 689 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.710,ns_1@10.242.238.88:<0.6745.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.711,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [views:debug,2014-08-19T16:50:23.714,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/541. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.714,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",541,active,0} [ns_server:debug,2014-08-19T16:50:23.715,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.715,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6766.1>) [ns_server:debug,2014-08-19T16:50:23.715,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 433) [ns_server:debug,2014-08-19T16:50:23.716,ns_1@10.242.238.88:<0.6767.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.716,ns_1@10.242.238.88:<0.6767.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:23.716,ns_1@10.242.238.88:<0.6766.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 433 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.716,ns_1@10.242.238.88:<0.6772.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 433 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.716,ns_1@10.242.238.88:<0.6773.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 433 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.720,ns_1@10.242.238.88:<0.6774.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 433 into 'ns_1@10.242.238.91' is <18126.25080.0> [ns_server:debug,2014-08-19T16:50:23.723,ns_1@10.242.238.88:<0.6774.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 433 into 'ns_1@10.242.238.89' is <18124.27380.0> [rebalance:debug,2014-08-19T16:50:23.723,ns_1@10.242.238.88:<0.6766.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 433 is <0.6774.1> [ns_server:debug,2014-08-19T16:50:23.754,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,745048}, tap_estimate, {replica_building,"default",433,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25080.0>, <<"replication_building_433_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.769,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,760219}, tap_estimate, {replica_building,"default",433,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27380.0>, <<"replication_building_433_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:23.769,ns_1@10.242.238.88:<0.6775.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27380.0>}, {'ns_1@10.242.238.91',<18126.25080.0>}]) [rebalance:info,2014-08-19T16:50:23.769,ns_1@10.242.238.88:<0.6766.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:23.770,ns_1@10.242.238.88:<0.6766.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 433 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.770,ns_1@10.242.238.88:<0.6766.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.771,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.775,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:23.775,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6787.1>) [ns_server:debug,2014-08-19T16:50:23.775,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 944) [ns_server:debug,2014-08-19T16:50:23.775,ns_1@10.242.238.88:<0.6788.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.775,ns_1@10.242.238.88:<0.6788.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:23.775,ns_1@10.242.238.88:<0.6787.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 944 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.776,ns_1@10.242.238.88:<0.6793.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 944 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.776,ns_1@10.242.238.88:<0.6794.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 944 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.779,ns_1@10.242.238.88:<0.6795.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 944 into 'ns_1@10.242.238.90' is <18125.23178.0> [ns_server:debug,2014-08-19T16:50:23.782,ns_1@10.242.238.88:<0.6795.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 944 into 'ns_1@10.242.238.91' is <18126.25085.0> [rebalance:debug,2014-08-19T16:50:23.782,ns_1@10.242.238.88:<0.6787.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 944 is <0.6795.1> [ns_server:debug,2014-08-19T16:50:23.816,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,807489}, tap_estimate, {replica_building,"default",944,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23178.0>, <<"replication_building_944_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:23.828,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,819380}, tap_estimate, {replica_building,"default",944,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25085.0>, <<"replication_building_944_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.828,ns_1@10.242.238.88:<0.6796.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25085.0>}, {'ns_1@10.242.238.90',<18125.23178.0>}]) [rebalance:info,2014-08-19T16:50:23.829,ns_1@10.242.238.88:<0.6787.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:23.829,ns_1@10.242.238.88:<0.6787.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 944 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.830,ns_1@10.242.238.88:<0.6787.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.830,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:23.834,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:23.834,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6822.1>) [ns_server:debug,2014-08-19T16:50:23.834,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 688) [ns_server:debug,2014-08-19T16:50:23.835,ns_1@10.242.238.88:<0.6823.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.835,ns_1@10.242.238.88:<0.6823.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:23.835,ns_1@10.242.238.88:<0.6822.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 688 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.835,ns_1@10.242.238.88:<0.6828.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 688 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.835,ns_1@10.242.238.88:<0.6829.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 688 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.839,ns_1@10.242.238.88:<0.6830.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 688 into 'ns_1@10.242.238.91' is <18126.25105.0> [ns_server:debug,2014-08-19T16:50:23.842,ns_1@10.242.238.88:<0.6830.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 688 into 'ns_1@10.242.238.90' is <18125.23183.0> [rebalance:debug,2014-08-19T16:50:23.842,ns_1@10.242.238.88:<0.6822.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 688 is <0.6830.1> [ns_server:debug,2014-08-19T16:50:23.872,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,863070}, tap_estimate, {replica_building,"default",688,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25105.0>, <<"replication_building_688_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.881,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 539. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.881,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/539. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.881,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",539,active,0} [ns_server:debug,2014-08-19T16:50:23.883,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746, 618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332, 877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719, 591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250, 122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692, 564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406, 278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976, 848,665,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196, 1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949, 821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,478,350, 895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424, 296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294,967, 839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681,553, 498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017,940, 812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837,654, 526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551,496,368, 913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810,755, 627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524,158, 886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366,911,783, 728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442,314, 987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701,573, 390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726,598,232, 960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674, 546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010, 805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855,544,178, 778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228] [ns_server:debug,2014-08-19T16:50:23.890,ns_1@10.242.238.88:<0.6831.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23183.0>}, {'ns_1@10.242.238.91',<18126.25105.0>}]) 
[rebalance:info,2014-08-19T16:50:23.890,ns_1@10.242.238.88:<0.6822.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:23.891,ns_1@10.242.238.88:<0.6822.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 688 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.891,ns_1@10.242.238.88:<0.6822.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.892,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.898,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:23.899,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6843.1>) [ns_server:debug,2014-08-19T16:50:23.899,ns_1@10.242.238.88:<0.6844.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.899,ns_1@10.242.238.88:<0.6844.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:23.900,ns_1@10.242.238.88:<0.6843.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 432 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.900,ns_1@10.242.238.88:<0.6849.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 432 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:50:23.900,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,880997}, tap_estimate, {replica_building,"default",688,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23183.0>, <<"replication_building_688_'ns_1@10.242.238.90'">>} [rebalance:info,2014-08-19T16:50:23.900,ns_1@10.242.238.88:<0.6850.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 432 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.900,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 432) [ns_server:debug,2014-08-19T16:50:23.903,ns_1@10.242.238.88:<0.6851.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 432 into 'ns_1@10.242.238.91' is <18126.25110.0> [ns_server:debug,2014-08-19T16:50:23.906,ns_1@10.242.238.88:<0.6851.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 432 into 'ns_1@10.242.238.89' is <18124.27406.0> [rebalance:debug,2014-08-19T16:50:23.906,ns_1@10.242.238.88:<0.6843.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 432 is <0.6851.1> [views:debug,2014-08-19T16:50:23.915,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/539. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.915,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",539,active,0} [ns_server:debug,2014-08-19T16:50:23.937,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,928573}, tap_estimate, {replica_building,"default",432,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25110.0>, <<"replication_building_432_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:23.953,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,944036}, tap_estimate, {replica_building,"default",432,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27406.0>, <<"replication_building_432_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:23.953,ns_1@10.242.238.88:<0.6852.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27406.0>}, {'ns_1@10.242.238.91',<18126.25110.0>}]) [rebalance:info,2014-08-19T16:50:23.953,ns_1@10.242.238.88:<0.6843.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:23.954,ns_1@10.242.238.88:<0.6843.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 432 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:23.954,ns_1@10.242.238.88:<0.6843.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:23.955,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:23.959,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:23.959,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6878.1>) [ns_server:debug,2014-08-19T16:50:23.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 943) [ns_server:debug,2014-08-19T16:50:23.959,ns_1@10.242.238.88:<0.6879.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:23.960,ns_1@10.242.238.88:<0.6879.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:23.960,ns_1@10.242.238.88:<0.6878.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 943 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:23.960,ns_1@10.242.238.88:<0.6884.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 943 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:23.960,ns_1@10.242.238.88:<0.6885.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 943 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:23.963,ns_1@10.242.238.88:<0.6886.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 943 into 'ns_1@10.242.238.90' is <18125.23203.0> [ns_server:debug,2014-08-19T16:50:23.966,ns_1@10.242.238.88:<0.6886.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 943 into 'ns_1@10.242.238.91' is <18126.25115.0> [rebalance:debug,2014-08-19T16:50:23.966,ns_1@10.242.238.88:<0.6878.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 943 is <0.6886.1> [ns_server:debug,2014-08-19T16:50:23.999,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 537. Nacking mccouch update. [views:debug,2014-08-19T16:50:23.999,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/537. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:23.999,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",537,active,0} [ns_server:debug,2014-08-19T16:50:23.999,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452623,990690}, tap_estimate, {replica_building,"default",943,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23203.0>, <<"replication_building_943_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.001,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746, 618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332, 877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719, 591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250, 122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692, 564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406, 278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976, 848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562, 196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276, 949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846, 663,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194, 922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819, 764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,478, 
350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212,1017, 940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965,837, 654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551,496, 368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938,810, 755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652,524, 158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366,911, 783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625,442, 314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884,701, 573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726,598, 232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857, 674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388,260, 1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310,855, 544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228] [ns_server:debug,2014-08-19T16:50:24.012,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,3412}, tap_estimate, {replica_building,"default",943,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25115.0>, <<"replication_building_943_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.013,ns_1@10.242.238.88:<0.6887.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25115.0>}, {'ns_1@10.242.238.90',<18125.23203.0>}]) [rebalance:info,2014-08-19T16:50:24.013,ns_1@10.242.238.88:<0.6878.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:24.013,ns_1@10.242.238.88:<0.6878.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 943 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.014,ns_1@10.242.238.88:<0.6878.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.014,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:24.018,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.018,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6899.1>) 
[ns_server:debug,2014-08-19T16:50:24.018,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 687) [ns_server:debug,2014-08-19T16:50:24.019,ns_1@10.242.238.88:<0.6900.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.019,ns_1@10.242.238.88:<0.6900.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:24.019,ns_1@10.242.238.88:<0.6899.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 687 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.019,ns_1@10.242.238.88:<0.6906.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 687 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [rebalance:info,2014-08-19T16:50:24.019,ns_1@10.242.238.88:<0.6905.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 687 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:50:24.023,ns_1@10.242.238.88:<0.6907.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 687 into 'ns_1@10.242.238.91' is <18126.25135.0> [ns_server:debug,2014-08-19T16:50:24.026,ns_1@10.242.238.88:<0.6907.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 687 into 'ns_1@10.242.238.90' is <18125.23208.0> [rebalance:debug,2014-08-19T16:50:24.026,ns_1@10.242.238.88:<0.6899.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 687 is <0.6907.1> [views:debug,2014-08-19T16:50:24.032,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/537. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",537,active,0} [ns_server:debug,2014-08-19T16:50:24.055,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,46896}, tap_estimate, {replica_building,"default",687,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25135.0>, <<"replication_building_687_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.072,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,63605}, tap_estimate, {replica_building,"default",687,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23208.0>, <<"replication_building_687_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.073,ns_1@10.242.238.88:<0.6908.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23208.0>}, {'ns_1@10.242.238.91',<18126.25135.0>}]) [rebalance:info,2014-08-19T16:50:24.073,ns_1@10.242.238.88:<0.6899.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:24.073,ns_1@10.242.238.88:<0.6899.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 687 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.074,ns_1@10.242.238.88:<0.6899.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.074,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.078,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.078,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.6920.1>) [ns_server:debug,2014-08-19T16:50:24.078,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 431) [ns_server:debug,2014-08-19T16:50:24.079,ns_1@10.242.238.88:<0.6921.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.079,ns_1@10.242.238.88:<0.6921.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:24.079,ns_1@10.242.238.88:<0.6920.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 431 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.079,ns_1@10.242.238.88:<0.6926.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 431 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.080,ns_1@10.242.238.88:<0.6927.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 431 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.083,ns_1@10.242.238.88:<0.6928.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 431 into 'ns_1@10.242.238.91' is <18126.25140.0> [ns_server:debug,2014-08-19T16:50:24.086,ns_1@10.242.238.88:<0.6928.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 431 into 'ns_1@10.242.238.89' is <18124.27427.0> [rebalance:debug,2014-08-19T16:50:24.086,ns_1@10.242.238.88:<0.6920.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 431 is <0.6928.1> [ns_server:debug,2014-08-19T16:50:24.116,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,107458}, tap_estimate, {replica_building,"default",431,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25140.0>, <<"replication_building_431_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.136,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,127928}, tap_estimate, {replica_building,"default",431,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27427.0>, <<"replication_building_431_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:24.137,ns_1@10.242.238.88:<0.6933.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27427.0>}, {'ns_1@10.242.238.91',<18126.25140.0>}]) [rebalance:info,2014-08-19T16:50:24.137,ns_1@10.242.238.88:<0.6920.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:24.138,ns_1@10.242.238.88:<0.6920.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 431 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.139,ns_1@10.242.238.88:<0.6920.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.139,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.143,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:24.143,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.6955.1>) [ns_server:debug,2014-08-19T16:50:24.143,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 942) [ns_server:debug,2014-08-19T16:50:24.143,ns_1@10.242.238.88:<0.6956.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.144,ns_1@10.242.238.88:<0.6956.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:24.144,ns_1@10.242.238.88:<0.6955.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 942 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.144,ns_1@10.242.238.88:<0.6961.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 942 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.144,ns_1@10.242.238.88:<0.6962.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 942 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.148,ns_1@10.242.238.88:<0.6963.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 942 into 'ns_1@10.242.238.90' is <18125.23228.0> [ns_server:debug,2014-08-19T16:50:24.150,ns_1@10.242.238.88:<0.6963.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 942 into 'ns_1@10.242.238.91' is <18126.25145.0> [rebalance:debug,2014-08-19T16:50:24.150,ns_1@10.242.238.88:<0.6955.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 942 is <0.6963.1> [ns_server:debug,2014-08-19T16:50:24.176,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 535. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.176,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/535. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.177,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",535,active,0} [ns_server:debug,2014-08-19T16:50:24.178,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746, 618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332, 877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719, 591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250, 122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692, 564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406, 278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976, 848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562, 196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276, 949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846, 663,535,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965, 837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551, 496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015,938, 810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835,652, 524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494,366, 911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753,625, 442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156,884, 701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726, 598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312,985, 857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571,388, 260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621,310, 855,544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594, 228] [ns_server:debug,2014-08-19T16:50:24.183,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,174295}, tap_estimate, {replica_building,"default",942,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23228.0>, 
<<"replication_building_942_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.196,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,187859}, tap_estimate, {replica_building,"default",942,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25145.0>, <<"replication_building_942_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.197,ns_1@10.242.238.88:<0.6964.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25145.0>}, {'ns_1@10.242.238.90',<18125.23228.0>}]) [rebalance:info,2014-08-19T16:50:24.197,ns_1@10.242.238.88:<0.6955.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:24.198,ns_1@10.242.238.88:<0.6955.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 942 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.198,ns_1@10.242.238.88:<0.6955.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.199,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:24.202,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.202,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.6976.1>) [ns_server:debug,2014-08-19T16:50:24.203,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 686) [ns_server:debug,2014-08-19T16:50:24.203,ns_1@10.242.238.88:<0.6977.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.203,ns_1@10.242.238.88:<0.6977.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:24.203,ns_1@10.242.238.88:<0.6976.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 686 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.204,ns_1@10.242.238.88:<0.6982.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 686 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.204,ns_1@10.242.238.88:<0.6983.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 686 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.208,ns_1@10.242.238.88:<0.6984.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 686 into 'ns_1@10.242.238.91' is <18126.25151.0> [ns_server:debug,2014-08-19T16:50:24.210,ns_1@10.242.238.88:<0.6984.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 686 into 'ns_1@10.242.238.90' is <18125.23233.0> [rebalance:debug,2014-08-19T16:50:24.211,ns_1@10.242.238.88:<0.6976.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 686 is <0.6984.1> [ns_server:debug,2014-08-19T16:50:24.242,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,233728}, tap_estimate, {replica_building,"default",686,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25151.0>, <<"replication_building_686_'ns_1@10.242.238.91'">>} [rebalance:info,2014-08-19T16:50:24.254,ns_1@10.242.238.88:<0.6189.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 440 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:24.255,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 440 state to active [rebalance:info,2014-08-19T16:50:24.256,ns_1@10.242.238.88:<0.6189.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 440 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.256,ns_1@10.242.238.88:<0.6189.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.260,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,251272}, tap_estimate, {replica_building,"default",686,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23233.0>, <<"replication_building_686_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:50:24.260,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/535. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.260,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",535,active,0} [ns_server:debug,2014-08-19T16:50:24.260,ns_1@10.242.238.88:<0.6985.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23233.0>}, {'ns_1@10.242.238.91',<18126.25151.0>}]) [rebalance:info,2014-08-19T16:50:24.260,ns_1@10.242.238.88:<0.6976.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:24.261,ns_1@10.242.238.88:<0.6976.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 686 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.262,ns_1@10.242.238.88:<0.6976.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.262,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.7001.1>) [ns_server:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 430) [ns_server:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.88:<0.7002.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.266,ns_1@10.242.238.88:<0.7002.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:24.267,ns_1@10.242.238.88:<0.7001.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 430 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.267,ns_1@10.242.238.88:<0.7007.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 430 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.267,ns_1@10.242.238.88:<0.7008.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 430 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.270,ns_1@10.242.238.88:<0.7009.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 430 into 'ns_1@10.242.238.91' is <18126.25173.0> [ns_server:debug,2014-08-19T16:50:24.273,ns_1@10.242.238.88:<0.7009.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 430 into 'ns_1@10.242.238.89' is <18124.27450.0> [rebalance:debug,2014-08-19T16:50:24.273,ns_1@10.242.238.88:<0.7001.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 430 is <0.7009.1> [ns_server:debug,2014-08-19T16:50:24.304,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,295645}, tap_estimate, {replica_building,"default",430,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25173.0>, <<"replication_building_430_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.319,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,310965}, tap_estimate, {replica_building,"default",430,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27450.0>, <<"replication_building_430_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:24.320,ns_1@10.242.238.88:<0.7010.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27450.0>}, {'ns_1@10.242.238.91',<18126.25173.0>}]) [rebalance:info,2014-08-19T16:50:24.320,ns_1@10.242.238.88:<0.7001.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:24.321,ns_1@10.242.238.88:<0.7001.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 430 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.321,ns_1@10.242.238.88:<0.7001.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.322,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.325,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:24.326,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.7022.1>) [ns_server:debug,2014-08-19T16:50:24.326,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 941) [ns_server:debug,2014-08-19T16:50:24.326,ns_1@10.242.238.88:<0.7023.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.326,ns_1@10.242.238.88:<0.7023.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:24.327,ns_1@10.242.238.88:<0.7022.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 941 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.327,ns_1@10.242.238.88:<0.7028.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 941 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.327,ns_1@10.242.238.88:<0.7029.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 941 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.331,ns_1@10.242.238.88:<0.7030.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 941 into 'ns_1@10.242.238.90' is <18125.23253.0> [ns_server:debug,2014-08-19T16:50:24.333,ns_1@10.242.238.88:<0.7030.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 941 into 'ns_1@10.242.238.91' is <18126.25178.0> [rebalance:debug,2014-08-19T16:50:24.333,ns_1@10.242.238.88:<0.7022.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 941 is <0.7030.1> [ns_server:debug,2014-08-19T16:50:24.364,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,355688}, tap_estimate, {replica_building,"default",941,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23253.0>, <<"replication_building_941_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.380,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,371010}, tap_estimate, {replica_building,"default",941,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25178.0>, <<"replication_building_941_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.380,ns_1@10.242.238.88:<0.7031.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25178.0>}, {'ns_1@10.242.238.90',<18125.23253.0>}]) [rebalance:info,2014-08-19T16:50:24.380,ns_1@10.242.238.88:<0.7022.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:24.381,ns_1@10.242.238.88:<0.7022.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 941 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.381,ns_1@10.242.238.88:<0.7022.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.382,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:24.385,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:24.386,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.7057.1>) [ns_server:debug,2014-08-19T16:50:24.386,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 685) [ns_server:debug,2014-08-19T16:50:24.386,ns_1@10.242.238.88:<0.7058.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.386,ns_1@10.242.238.88:<0.7058.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:24.386,ns_1@10.242.238.88:<0.7057.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 685 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.387,ns_1@10.242.238.88:<0.7063.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 685 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.387,ns_1@10.242.238.88:<0.7064.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 685 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.391,ns_1@10.242.238.88:<0.7065.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 685 into 'ns_1@10.242.238.91' is <18126.25184.0> [ns_server:debug,2014-08-19T16:50:24.393,ns_1@10.242.238.88:<0.7065.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 685 into 'ns_1@10.242.238.90' is <18125.23258.0> [rebalance:debug,2014-08-19T16:50:24.393,ns_1@10.242.238.88:<0.7057.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 685 is <0.7065.1> [ns_server:debug,2014-08-19T16:50:24.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 533. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.418,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/533. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",533,active,0} [ns_server:debug,2014-08-19T16:50:24.420,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746, 618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332, 877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719, 591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250, 122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692, 564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406, 278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976, 848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562, 196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276, 949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846, 663,535,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 533,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817, 762,634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,476, 348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790, 735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578, 212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292, 965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679, 551,496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015, 938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835, 652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494, 366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753, 625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156, 884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781, 726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312, 985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571, 388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621, 310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905, 594,228] [ns_server:debug,2014-08-19T16:50:24.426,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,417205}, tap_estimate, {replica_building,"default",685,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25184.0>, 
<<"replication_building_685_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.441,ns_1@10.242.238.88:<0.7066.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23258.0>}, {'ns_1@10.242.238.91',<18126.25184.0>}]) [rebalance:info,2014-08-19T16:50:24.441,ns_1@10.242.238.88:<0.7057.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:24.441,ns_1@10.242.238.88:<0.7057.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 685 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.442,ns_1@10.242.238.88:<0.7057.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.442,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.447,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,431751}, tap_estimate, {replica_building,"default",685,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23258.0>, <<"replication_building_685_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.448,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.448,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.7078.1>) [ns_server:debug,2014-08-19T16:50:24.449,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 429) [ns_server:debug,2014-08-19T16:50:24.449,ns_1@10.242.238.88:<0.7079.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.449,ns_1@10.242.238.88:<0.7079.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:24.449,ns_1@10.242.238.88:<0.7078.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 429 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.450,ns_1@10.242.238.88:<0.7084.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 429 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.450,ns_1@10.242.238.88:<0.7085.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 429 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.453,ns_1@10.242.238.88:<0.7086.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 429 into 'ns_1@10.242.238.91' is <18126.25189.0> [ns_server:debug,2014-08-19T16:50:24.456,ns_1@10.242.238.88:<0.7086.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 429 into 'ns_1@10.242.238.89' is <18124.27484.0> [rebalance:debug,2014-08-19T16:50:24.456,ns_1@10.242.238.88:<0.7078.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 429 is <0.7086.1> [views:debug,2014-08-19T16:50:24.486,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/533. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",533,active,0} [ns_server:debug,2014-08-19T16:50:24.487,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,478253}, tap_estimate, {replica_building,"default",429,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25189.0>, <<"replication_building_429_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.503,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,493990}, tap_estimate, {replica_building,"default",429,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27484.0>, <<"replication_building_429_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:24.503,ns_1@10.242.238.88:<0.7087.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27484.0>}, {'ns_1@10.242.238.91',<18126.25189.0>}]) [rebalance:info,2014-08-19T16:50:24.503,ns_1@10.242.238.88:<0.7078.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:24.504,ns_1@10.242.238.88:<0.7078.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 429 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.504,ns_1@10.242.238.88:<0.7078.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.505,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.508,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:24.508,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.7099.1>) [ns_server:debug,2014-08-19T16:50:24.509,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 940) [ns_server:debug,2014-08-19T16:50:24.509,ns_1@10.242.238.88:<0.7100.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.509,ns_1@10.242.238.88:<0.7100.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:24.509,ns_1@10.242.238.88:<0.7099.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 940 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.510,ns_1@10.242.238.88:<0.7105.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 940 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.510,ns_1@10.242.238.88:<0.7106.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 940 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.518,ns_1@10.242.238.88:<0.7107.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 940 into 'ns_1@10.242.238.90' is <18125.23279.0> [ns_server:debug,2014-08-19T16:50:24.520,ns_1@10.242.238.88:<0.7107.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 940 into 'ns_1@10.242.238.91' is <18126.25208.0> [rebalance:debug,2014-08-19T16:50:24.520,ns_1@10.242.238.88:<0.7099.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 940 is <0.7107.1> [ns_server:debug,2014-08-19T16:50:24.557,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,548433}, tap_estimate, {replica_building,"default",940,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23279.0>, <<"replication_building_940_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.565,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,556360}, tap_estimate, {replica_building,"default",940,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25208.0>, <<"replication_building_940_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.565,ns_1@10.242.238.88:<0.7108.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25208.0>}, {'ns_1@10.242.238.90',<18125.23279.0>}]) [rebalance:info,2014-08-19T16:50:24.565,ns_1@10.242.238.88:<0.7099.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:24.566,ns_1@10.242.238.88:<0.7099.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 940 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.566,ns_1@10.242.238.88:<0.7099.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:24.567,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:24.571,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.571,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.7134.1>) [ns_server:debug,2014-08-19T16:50:24.571,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 684) [ns_server:debug,2014-08-19T16:50:24.572,ns_1@10.242.238.88:<0.7135.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.572,ns_1@10.242.238.88:<0.7135.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:24.572,ns_1@10.242.238.88:<0.7134.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 684 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.572,ns_1@10.242.238.88:<0.7140.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 684 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.572,ns_1@10.242.238.88:<0.7141.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 684 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.576,ns_1@10.242.238.88:<0.7142.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 684 into 'ns_1@10.242.238.91' is <18126.25220.0> [ns_server:debug,2014-08-19T16:50:24.579,ns_1@10.242.238.88:<0.7142.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 684 into 'ns_1@10.242.238.90' is <18125.23284.0> [rebalance:debug,2014-08-19T16:50:24.579,ns_1@10.242.238.88:<0.7134.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 684 is <0.7142.1> [ns_server:debug,2014-08-19T16:50:24.616,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,607122}, tap_estimate, {replica_building,"default",684,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25220.0>, <<"replication_building_684_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.625,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,616486}, tap_estimate, {replica_building,"default",684,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23284.0>, <<"replication_building_684_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.626,ns_1@10.242.238.88:<0.7143.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23284.0>}, {'ns_1@10.242.238.91',<18126.25220.0>}]) [rebalance:info,2014-08-19T16:50:24.626,ns_1@10.242.238.88:<0.7134.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 
[rebalance:info,2014-08-19T16:50:24.626,ns_1@10.242.238.88:<0.7134.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 684 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.627,ns_1@10.242.238.88:<0.7134.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.627,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.631,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.631,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.7155.1>) [ns_server:debug,2014-08-19T16:50:24.631,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 428) [ns_server:debug,2014-08-19T16:50:24.632,ns_1@10.242.238.88:<0.7156.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.632,ns_1@10.242.238.88:<0.7156.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:24.632,ns_1@10.242.238.88:<0.7155.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 428 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.632,ns_1@10.242.238.88:<0.7161.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 428 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.632,ns_1@10.242.238.88:<0.7162.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 428 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.636,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 531. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.636,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/531. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.636,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",531,active,0} [ns_server:debug,2014-08-19T16:50:24.636,ns_1@10.242.238.88:<0.7163.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 428 into 'ns_1@10.242.238.91' is <18126.25225.0> [ns_server:debug,2014-08-19T16:50:24.638,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,828,645, 462,334,879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904, 776,721,593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746, 618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332, 877,694,566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719, 591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250, 122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692, 564,198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406, 278,951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976, 848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562, 196,1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276, 949,821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846, 663,535,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560, 194,922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947, 819,764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661, 533,478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192, 920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817, 762,634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531, 476,348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918, 790,735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760, 632,138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420, 292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 679,551,496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210, 1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963, 835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549, 494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808, 753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522, 156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909, 781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440, 312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699, 571,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932, 621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360, 905,594,228] 
[ns_server:debug,2014-08-19T16:50:24.638,ns_1@10.242.238.88:<0.7163.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 428 into 'ns_1@10.242.238.89' is <18124.27504.0> [rebalance:debug,2014-08-19T16:50:24.639,ns_1@10.242.238.88:<0.7155.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 428 is <0.7163.1> [ns_server:debug,2014-08-19T16:50:24.672,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,663551}, tap_estimate, {replica_building,"default",428,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25225.0>, <<"replication_building_428_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.687,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,678600}, tap_estimate, {replica_building,"default",428,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27504.0>, <<"replication_building_428_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:24.688,ns_1@10.242.238.88:<0.7164.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27504.0>}, {'ns_1@10.242.238.91',<18126.25225.0>}]) [rebalance:info,2014-08-19T16:50:24.688,ns_1@10.242.238.88:<0.7155.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:24.688,ns_1@10.242.238.88:<0.7155.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 428 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.689,ns_1@10.242.238.88:<0.7155.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.689,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.693,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:24.693,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.7176.1>) [ns_server:debug,2014-08-19T16:50:24.693,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 939) [ns_server:debug,2014-08-19T16:50:24.694,ns_1@10.242.238.88:<0.7177.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.694,ns_1@10.242.238.88:<0.7177.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:24.694,ns_1@10.242.238.88:<0.7176.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 939 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.694,ns_1@10.242.238.88:<0.7182.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 939 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.694,ns_1@10.242.238.88:<0.7183.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 939 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.698,ns_1@10.242.238.88:<0.7184.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 939 into 'ns_1@10.242.238.90' is <18125.23304.0> [ns_server:debug,2014-08-19T16:50:24.700,ns_1@10.242.238.88:<0.7184.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 939 into 'ns_1@10.242.238.91' is <18126.25244.0> [rebalance:debug,2014-08-19T16:50:24.701,ns_1@10.242.238.88:<0.7176.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 939 is <0.7184.1> [views:debug,2014-08-19T16:50:24.703,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/531. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",531,active,0} [ns_server:debug,2014-08-19T16:50:24.733,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,724839}, tap_estimate, {replica_building,"default",939,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23304.0>, <<"replication_building_939_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.746,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,737807}, tap_estimate, {replica_building,"default",939,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25244.0>, <<"replication_building_939_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.747,ns_1@10.242.238.88:<0.7185.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25244.0>}, {'ns_1@10.242.238.90',<18125.23304.0>}]) [rebalance:info,2014-08-19T16:50:24.747,ns_1@10.242.238.88:<0.7176.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:24.747,ns_1@10.242.238.88:<0.7176.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 939 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.748,ns_1@10.242.238.88:<0.7176.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.749,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:24.752,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:24.752,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.7197.1>) [ns_server:debug,2014-08-19T16:50:24.752,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 683) [ns_server:debug,2014-08-19T16:50:24.753,ns_1@10.242.238.88:<0.7198.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.753,ns_1@10.242.238.88:<0.7198.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:24.753,ns_1@10.242.238.88:<0.7197.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 683 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.753,ns_1@10.242.238.88:<0.7203.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 683 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.753,ns_1@10.242.238.88:<0.7204.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 683 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.757,ns_1@10.242.238.88:<0.7205.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 683 into 'ns_1@10.242.238.91' is <18126.25250.0> [ns_server:debug,2014-08-19T16:50:24.759,ns_1@10.242.238.88:<0.7205.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 683 into 'ns_1@10.242.238.90' is <18125.23309.0> [rebalance:debug,2014-08-19T16:50:24.760,ns_1@10.242.238.88:<0.7197.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 683 is <0.7205.1> [ns_server:debug,2014-08-19T16:50:24.792,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,783277}, tap_estimate, {replica_building,"default",683,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25250.0>, <<"replication_building_683_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.804,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,795657}, tap_estimate, {replica_building,"default",683,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23309.0>, <<"replication_building_683_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.805,ns_1@10.242.238.88:<0.7206.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.23309.0>}, {'ns_1@10.242.238.91',<18126.25250.0>}]) [rebalance:info,2014-08-19T16:50:24.805,ns_1@10.242.238.88:<0.7197.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:24.805,ns_1@10.242.238.88:<0.7197.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 683 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.806,ns_1@10.242.238.88:<0.7197.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:24.807,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.810,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}}] [rebalance:debug,2014-08-19T16:50:24.810,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']] (<0.7232.1>) [ns_server:debug,2014-08-19T16:50:24.810,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 427) [ns_server:debug,2014-08-19T16:50:24.811,ns_1@10.242.238.88:<0.7233.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.811,ns_1@10.242.238.88:<0.7233.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:24.811,ns_1@10.242.238.88:<0.7232.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 427 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.811,ns_1@10.242.238.88:<0.7238.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 427 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.811,ns_1@10.242.238.88:<0.7239.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 427 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.815,ns_1@10.242.238.88:<0.7240.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 427 into 'ns_1@10.242.238.91' is <18126.25269.0> [ns_server:debug,2014-08-19T16:50:24.817,ns_1@10.242.238.88:<0.7240.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 427 into 'ns_1@10.242.238.89' is <18124.27524.0> [rebalance:debug,2014-08-19T16:50:24.818,ns_1@10.242.238.88:<0.7232.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 427 is <0.7240.1> [ns_server:debug,2014-08-19T16:50:24.850,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,841327}, tap_estimate, {replica_building,"default",427,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25269.0>, <<"replication_building_427_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.853,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 529. Nacking mccouch update. [views:debug,2014-08-19T16:50:24.853,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/529. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.854,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",529,active,0} [ns_server:debug,2014-08-19T16:50:24.855,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721, 593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252, 124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332,877,694, 566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408, 280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198, 1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951, 823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665, 537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,472,344,889,706,578, 212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292, 965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679, 551,496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210,1015, 938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835, 652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494, 366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808,753, 625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522,156, 884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909,781, 726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440,312, 985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699,571, 388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932,621, 310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905, 594,228,828,462] [ns_server:debug,2014-08-19T16:50:24.865,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,856459}, tap_estimate, {replica_building,"default",427,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27524.0>, 
<<"replication_building_427_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:24.865,ns_1@10.242.238.88:<0.7241.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27524.0>}, {'ns_1@10.242.238.91',<18126.25269.0>}]) [rebalance:info,2014-08-19T16:50:24.866,ns_1@10.242.238.88:<0.7232.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:24.866,ns_1@10.242.238.88:<0.7232.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 427 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.867,ns_1@10.242.238.88:<0.7232.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.867,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:24.871,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:24.871,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']] (<0.7253.1>) [ns_server:debug,2014-08-19T16:50:24.871,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 938) [ns_server:debug,2014-08-19T16:50:24.871,ns_1@10.242.238.88:<0.7254.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:24.872,ns_1@10.242.238.88:<0.7254.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:24.872,ns_1@10.242.238.88:<0.7253.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 938 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:24.872,ns_1@10.242.238.88:<0.7259.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 938 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:24.872,ns_1@10.242.238.88:<0.7260.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 938 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:24.876,ns_1@10.242.238.88:<0.7261.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 938 into 'ns_1@10.242.238.90' is <18125.23329.0> [ns_server:debug,2014-08-19T16:50:24.879,ns_1@10.242.238.88:<0.7261.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 938 into 'ns_1@10.242.238.91' is <18126.25274.0> [rebalance:debug,2014-08-19T16:50:24.879,ns_1@10.242.238.88:<0.7253.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 938 is <0.7261.1> [ns_server:debug,2014-08-19T16:50:24.911,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,902117}, tap_estimate, {replica_building,"default",938,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.23329.0>, <<"replication_building_938_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:24.926,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452624,917022}, tap_estimate, {replica_building,"default",938,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.25274.0>, <<"replication_building_938_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:24.926,ns_1@10.242.238.88:<0.7262.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.25274.0>}, {'ns_1@10.242.238.90',<18125.23329.0>}]) [rebalance:info,2014-08-19T16:50:24.926,ns_1@10.242.238.88:<0.7253.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:24.927,ns_1@10.242.238.88:<0.7253.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 938 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:24.927,ns_1@10.242.238.88:<0.7253.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:24.928,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}} [views:debug,2014-08-19T16:50:24.929,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/529. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:24.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",529,active,0} [ns_server:debug,2014-08-19T16:50:24.929,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:25.087,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 527. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.087,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/527. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.088,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",527,active,0} [ns_server:debug,2014-08-19T16:50:25.089,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721, 593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252, 124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332,877,694, 566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408, 280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198, 1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951, 823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665, 537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420, 292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 679,551,496,368,913,785,730,602,236,108,964,836,653,470,342,887,704,576,210, 1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963, 835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549, 494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936,808, 753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522, 
156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909, 781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623,440, 312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882,699, 571,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009,932, 621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360, 905,594,228,828,462] [views:debug,2014-08-19T16:50:25.150,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/527. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.150,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",527,active,0} [ns_server:debug,2014-08-19T16:50:25.225,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 525. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.225,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/525. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.225,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",525,active,0} [ns_server:debug,2014-08-19T16:50:25.227,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721, 593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252, 124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332,877,694, 566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408, 280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198, 1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951, 823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665, 537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420, 
292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576, 210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290, 963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677, 549,494,366,911,783,728,600,234,962,834,651,468,340,885,702,574,208,1013,936, 808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650, 522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364, 909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751,623, 440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882, 699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698,1009, 932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982,671, 360,905,594,228,828,462] [views:debug,2014-08-19T16:50:25.285,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/525. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.285,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",525,active,0} [ns_server:debug,2014-08-19T16:50:25.460,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 523. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.460,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/523. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",523,active,0} [ns_server:debug,2014-08-19T16:50:25.462,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721, 593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252, 124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332,877,694, 566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408, 280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198, 1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951, 823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665, 537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 
607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420, 292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576, 210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290, 963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677, 549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013, 936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833, 650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492, 364,909,781,726,598,232,960,832,649,466,338,883,700,572,206,1011,934,806,751, 623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154, 882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698, 1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982, 671,360,905,594,228,828,462] [views:debug,2014-08-19T16:50:25.544,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/523. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.546,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",523,active,0} [ns_server:debug,2014-08-19T16:50:25.644,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 521. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.645,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/521. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",521,active,0} [ns_server:debug,2014-08-19T16:50:25.646,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,696,568,202,1007,930,802,747,619,436,308,981,853,670,542,176,904,776,721, 593,410,282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252, 124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,460,332,877,694, 566,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408, 280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198, 1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951, 823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665, 537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420, 292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576, 210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290, 963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677, 549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013, 936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833, 650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492, 364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934,806, 751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520, 154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,464,698, 1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982, 671,360,905,594,228,828,462] [views:debug,2014-08-19T16:50:25.703,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/521. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",521,active,0} [ns_server:debug,2014-08-19T16:50:25.804,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 519. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.804,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/519. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.804,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",519,active,0} [ns_server:debug,2014-08-19T16:50:25.806,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,930,802,747,619,436,308,981,853,670,542,176,904,776,721,593,410, 282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980, 852,669,541,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200, 1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953, 825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926, 798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823,640, 512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482, 354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001,924,796, 741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638, 144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352, 897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737,609, 426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424, 296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965, 837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551, 496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576,210,1015, 938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835, 652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494, 366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013,936,808, 753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522, 156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909, 781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623, 
440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882, 699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982, 671,360,905,594,228,828,462,696,1007] [views:debug,2014-08-19T16:50:25.888,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/519. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.888,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",519,active,0} [ns_server:debug,2014-08-19T16:50:25.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 517. Nacking mccouch update. [views:debug,2014-08-19T16:50:25.988,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/517. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:25.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",517,active,0} [ns_server:debug,2014-08-19T16:50:25.990,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,930,802,747,619,436,308,981,853,670,542,176,904,776,721,593,410, 282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980, 852,669,541,486,358,903,775,720,592,226,954,826,643,460,332,877,694,566,200, 1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953, 825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003,926, 798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823,640, 512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482, 354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001,924,796, 741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638, 144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352, 897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737,609, 426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607,424, 296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292,965, 837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679,551, 496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576,210,1015, 
938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963,835, 652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549,494, 366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013,936,808, 753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650,522, 156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364,909, 781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623, 440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154,882, 699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982, 671,360,905,594,228,828,517,462,696,1007] [views:debug,2014-08-19T16:50:26.022,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/517. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.022,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",517,active,0} [ns_server:debug,2014-08-19T16:50:26.122,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 515. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.122,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/515. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.122,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",515,active,0} [ns_server:debug,2014-08-19T16:50:26.124,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,930,802,747,619,436,308,981,853,670,542,176,904,776,721,593,410, 282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980, 852,669,541,486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566, 200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280, 953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850, 667,539,484,356,901,773,718,590,224,952,824,641,458,330,875,692,564,198,1003, 926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823, 640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537, 482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001,924, 796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766, 638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480, 352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922,794, 739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636, 142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350, 895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 
580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706,578, 212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292, 965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679, 551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576,210, 1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963, 835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549, 494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013,936, 808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650, 522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364, 909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751, 623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154, 882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982, 671,360,905,594,228,828,517,462,696,1007] [views:debug,2014-08-19T16:50:26.206,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/515. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",515,active,0} [ns_server:debug,2014-08-19T16:50:26.339,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 513. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.340,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/513. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.340,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",513,active,0} [ns_server:debug,2014-08-19T16:50:26.341,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,930,802,747,619,436,308,981,853,670,542,176,904,776,721,593,410, 282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980, 852,669,541,486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566, 200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280, 953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850, 667,539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,198, 1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951, 823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665, 537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420, 292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576, 210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290, 963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677, 549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013, 936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833, 650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492, 364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934,806, 751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520, 154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519,464, 698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126, 982,671,360,905,594,228,828,517,462,696,1007] [views:debug,2014-08-19T16:50:26.423,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/513. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",513,active,0} [ns_server:debug,2014-08-19T16:50:26.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 511. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.499,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/511. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",511,active,0} [ns_server:debug,2014-08-19T16:50:26.501,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,930,802,747,619,436,308,981,853,670,542,176,904,776,721,593,410, 282,955,827,644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980, 852,669,541,486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566, 511,200,1005,928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408, 280,953,825,642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564, 198,1003,926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278, 951,823,640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848, 665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196, 1001,924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949, 821,766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663, 535,480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194, 922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819, 764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476, 348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790, 735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603, 420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990, 862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704, 576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418, 290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860, 677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208, 1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961, 833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547, 492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934, 
806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648, 520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519, 464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748, 126,982,671,360,905,594,228,828,517,462,696,1007] [views:debug,2014-08-19T16:50:26.558,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/511. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.558,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",511,active,0} [ns_server:debug,2014-08-19T16:50:26.729,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 509. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.729,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/509. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.729,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",509,active,0} [ns_server:debug,2014-08-19T16:50:26.731,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,853,670,542,176,904,776,721,593,410,282,955,827, 644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541, 486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005, 928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825, 642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539, 484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823, 640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537, 482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,196,1001,924, 796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766, 638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480, 352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922,794, 739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636, 142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350, 895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706,578, 212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420,292, 965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679, 551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576,210, 
1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290,963, 835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677,549, 494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013,936, 808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833,650, 522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492,364, 909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751, 623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520,154, 882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126,982, 671,360,905,594,228,828,517,462,696,1007,930,619,308] [views:debug,2014-08-19T16:50:26.814,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/509. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.814,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",509,active,0} [ns_server:debug,2014-08-19T16:50:26.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 507. Nacking mccouch update. [views:debug,2014-08-19T16:50:26.988,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/507. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:26.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",507,active,0} [ns_server:debug,2014-08-19T16:50:26.990,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,853,670,542,176,904,776,721,593,410,282,955,827, 644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541, 486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005, 928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825, 642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539, 484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823, 640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537, 482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,194,922, 794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764, 636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478, 350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920,792, 737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 
708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603,420, 292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576, 210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418,290, 963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677, 549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013, 936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961,833, 650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547,492, 364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934,806, 751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648,520, 154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519,464, 698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748,126, 982,671,360,905,594,228,828,517,462,696,1007,930,619,308] [views:debug,2014-08-19T16:50:27.072,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/507. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.072,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",507,active,0} [ns_server:debug,2014-08-19T16:50:27.222,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 505. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.222,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/505. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.223,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",505,active,0} [ns_server:debug,2014-08-19T16:50:27.224,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,853,670,542,176,904,776,721,593,410,282,955,827, 644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541, 486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005, 928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825, 642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539, 484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823, 640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537, 482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194, 922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819, 764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,192,920, 792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762, 634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476, 348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918,790, 735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603, 420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990, 862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704, 576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418, 290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860, 677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208, 1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961, 833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547, 492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934, 806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648, 520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519, 464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748, 126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308] [views:debug,2014-08-19T16:50:27.273,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/505. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.273,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",505,active,0} [ns_server:debug,2014-08-19T16:50:27.415,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 503. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.415,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/503. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.415,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",503,active,0} [ns_server:debug,2014-08-19T16:50:27.417,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,853,670,542,176,904,776,721,593,410,282,955,827, 644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541, 486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005, 928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825, 642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539, 484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823, 640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537, 482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194, 922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819, 764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192, 920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817, 762,634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531, 476,348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,190,918, 790,735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760, 632,138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474, 346,891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916,788, 733,605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630, 136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731, 603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601, 418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988, 860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574, 208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288, 961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675, 547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011, 
934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831, 648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830, 519,464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803, 748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308] [views:debug,2014-08-19T16:50:27.466,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/503. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.466,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",503,active,0} [ns_server:debug,2014-08-19T16:50:27.557,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 501. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.557,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/501. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.557,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",501,active,0} [ns_server:debug,2014-08-19T16:50:27.559,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,853,670,542,176,904,776,721,593,410,282,955,827, 644,516,150,878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541, 486,358,903,775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005, 928,800,745,617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825, 642,514,148,876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539, 484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823, 640,512,146,874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537, 482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821, 766,638,144,872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535, 480,352,897,769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194, 922,794,739,611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819, 764,636,142,998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533, 478,350,895,712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192, 920,792,737,609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817, 762,634,140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531, 476,348,893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190, 918,790,735,607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815, 760,632,138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529, 474,346,891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,188,916, 788,733,605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758, 630,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786, 731,603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628, 
134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729, 601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132, 988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416, 288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858, 675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206, 1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959, 831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230, 830,519,464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258, 803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308] [views:debug,2014-08-19T16:50:27.625,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/501. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.625,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",501,active,0} [ns_server:debug,2014-08-19T16:50:27.725,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 499. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.725,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/499. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.725,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",499,active,0} [ns_server:debug,2014-08-19T16:50:27.727,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,776,721,593,410,282,955,827,644,516,150, 878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903, 775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745, 617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148, 876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901, 773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146, 874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899, 771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144, 872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897, 769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 
607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,186,914,786,731,603, 420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990, 862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704, 576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601,418, 290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860, 677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208, 1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288,961, 833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547, 492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934, 806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831,648, 520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830,519, 464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803,748, 126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,176] [views:debug,2014-08-19T16:50:27.758,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/499. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.759,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",499,active,0} [ns_server:debug,2014-08-19T16:50:27.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 497. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.834,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/497. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",497,active,0} [ns_server:debug,2014-08-19T16:50:27.836,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,776,721,593,410,282,955,827,644,516,150, 878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903, 775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745, 617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148, 876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901, 773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146, 874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899, 771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144, 872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897, 769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,184,912,784,729,601, 418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988, 860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574, 208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416,288, 961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675, 547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011, 934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959,831, 648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830, 519,464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258,803, 748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,176] [views:debug,2014-08-19T16:50:27.869,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/497. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.869,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",497,active,0} [ns_server:debug,2014-08-19T16:50:27.944,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 495. Nacking mccouch update. [views:debug,2014-08-19T16:50:27.944,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/495. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.945,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",495,active,0} [ns_server:debug,2014-08-19T16:50:27.946,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,776,721,593,410,282,955,827,644,516,150, 878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903, 775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745, 617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148, 876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901, 773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146, 874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899, 771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144, 872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897, 769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132, 988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,182,910,782,727,599,416, 288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858, 675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206, 
1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286,959, 831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230, 830,519,464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569,258, 803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542, 176] [ns_server:debug,2014-08-19T16:50:27.950,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_938_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_938_'ns_1@10.242.238.90'">>}]}, {move_state,427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_427_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_427_'ns_1@10.242.238.91'">>}]}, {move_state,683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_683_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_683_'ns_1@10.242.238.91'">>}]}, {move_state,939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_939_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_939_'ns_1@10.242.238.90'">>}]}, {move_state,428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_428_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_428_'ns_1@10.242.238.91'">>}]}, {move_state,684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_684_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_684_'ns_1@10.242.238.91'">>}]}, {move_state,940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_940_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_940_'ns_1@10.242.238.90'">>}]}, {move_state,429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_429_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_429_'ns_1@10.242.238.91'">>}]}, {move_state,685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_685_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_685_'ns_1@10.242.238.91'">>}]}, {move_state,941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_941_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_941_'ns_1@10.242.238.90'">>}]}, 
{move_state,430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_430_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_430_'ns_1@10.242.238.91'">>}]}, {move_state,686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_686_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_686_'ns_1@10.242.238.91'">>}]}, {move_state,942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_942_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_942_'ns_1@10.242.238.90'">>}]}, {move_state,431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_431_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_431_'ns_1@10.242.238.91'">>}]}, {move_state,687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_687_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_687_'ns_1@10.242.238.91'">>}]}, {move_state,943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_943_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_943_'ns_1@10.242.238.90'">>}]}, {move_state,432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_432_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_432_'ns_1@10.242.238.91'">>}]}, {move_state,688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_688_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_688_'ns_1@10.242.238.91'">>}]}, {move_state,944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_944_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_944_'ns_1@10.242.238.90'">>}]}, {move_state,433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_433_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_433_'ns_1@10.242.238.91'">>}]}, {move_state,689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_689_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_689_'ns_1@10.242.238.91'">>}]}, {move_state,945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_945_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_945_'ns_1@10.242.238.90'">>}]}, {move_state,434, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_434_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_434_'ns_1@10.242.238.91'">>}]}, {move_state,690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_690_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_690_'ns_1@10.242.238.91'">>}]}, {move_state,946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_946_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_946_'ns_1@10.242.238.90'">>}]}, {move_state,435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_435_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_435_'ns_1@10.242.238.91'">>}]}, {move_state,691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_691_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_691_'ns_1@10.242.238.91'">>}]}, {move_state,947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_947_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_947_'ns_1@10.242.238.90'">>}]}, {move_state,436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_436_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_436_'ns_1@10.242.238.91'">>}]}, {move_state,692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_692_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_692_'ns_1@10.242.238.91'">>}]}, {move_state,948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_948_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_948_'ns_1@10.242.238.90'">>}]}, {move_state,437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_437_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_437_'ns_1@10.242.238.91'">>}]}, {move_state,693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_693_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_693_'ns_1@10.242.238.91'">>}]}, {move_state,949, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_949_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_949_'ns_1@10.242.238.90'">>}]}, {move_state,438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_438_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_438_'ns_1@10.242.238.91'">>}]}, {move_state,694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_694_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_694_'ns_1@10.242.238.91'">>}]}, {move_state,950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_950_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_950_'ns_1@10.242.238.90'">>}]}, {move_state,439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_439_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_439_'ns_1@10.242.238.91'">>}]}, {move_state,695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_695_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_695_'ns_1@10.242.238.91'">>}]}, {move_state,951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_951_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_951_'ns_1@10.242.238.90'">>}]}, {move_state,440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_440_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_440_'ns_1@10.242.238.91'">>}]}, {move_state,696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_696_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_696_'ns_1@10.242.238.91'">>}]}, {move_state,952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_952_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_952_'ns_1@10.242.238.90'">>}]}, {move_state,441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_441_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_441_'ns_1@10.242.238.91'">>}]}, {move_state,697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_697_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_697_'ns_1@10.242.238.91'">>}]}, {move_state,953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_953_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_953_'ns_1@10.242.238.90'">>}]}, {move_state,442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_442_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_442_'ns_1@10.242.238.91'">>}]}, {move_state,698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_698_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_698_'ns_1@10.242.238.91'">>}]}, {move_state,954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_954_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_954_'ns_1@10.242.238.90'">>}]}, {move_state,443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_443_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_443_'ns_1@10.242.238.91'">>}]}, {move_state,699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_699_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_699_'ns_1@10.242.238.91'">>}]}, {move_state,955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_955_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_955_'ns_1@10.242.238.90'">>}]}, {move_state,444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_444_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_444_'ns_1@10.242.238.91'">>}]}, {move_state,700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_700_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_700_'ns_1@10.242.238.91'">>}]}, {move_state,956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_956_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_956_'ns_1@10.242.238.90'">>}]}, {move_state,445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_445_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_445_'ns_1@10.242.238.91'">>}]}, {move_state,701, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_701_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_701_'ns_1@10.242.238.91'">>}]}, {move_state,957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_957_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_957_'ns_1@10.242.238.90'">>}]}, {move_state,446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_446_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_446_'ns_1@10.242.238.91'">>}]}, {move_state,702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_702_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_702_'ns_1@10.242.238.91'">>}]}, {move_state,958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_958_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_958_'ns_1@10.242.238.90'">>}]}, {move_state,447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_447_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_447_'ns_1@10.242.238.91'">>}]}, {move_state,959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_959_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_959_'ns_1@10.242.238.90'">>}]}, {move_state,703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_703_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_703_'ns_1@10.242.238.91'">>}]}] [ns_server:debug,2014-08-19T16:50:27.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 938, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 427, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 683, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 939, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 428, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 684, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
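The docs_left_updater_loop dump above lists one {move_state, VBucket, OldChain, NewChain, Stats} tuple per pending vbucket move, with one {replica_building_stats, Node, 0, 0, TapName} entry per destination node. A minimal sketch of that shape follows; the field names, and the reading of the two zero counters as document totals/remaining, are assumptions inferred from the dump, not taken from the ns_server source.

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class ReplicaBuildingStats:
    """One {replica_building_stats, Node, _, _, TapName} entry from the dump.

    Both numeric fields are 0 in this log; naming them docs_total/docs_left
    is an assumption about their meaning, not something the log states.
    """
    node: str
    docs_total: int
    docs_left: int
    tap_name: str


@dataclass
class MoveState:
    """One {move_state, VBucket, OldChain, NewChain, Stats} entry."""
    vbucket: int
    old_chain: List[Optional[str]]  # e.g. ['ns_1@10.242.238.88', None]
    new_chain: List[str]            # e.g. ['ns_1@10.242.238.91', 'ns_1@10.242.238.90']
    stats: List[ReplicaBuildingStats]


def total_docs_left(moves: List[MoveState]) -> int:
    """Sum the per-destination 'docs left' counters across all pending moves."""
    return sum(s.docs_left for m in moves for s in m.stats)
```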
[ns_server:debug,2014-08-19T16:50:27.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 940, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 429, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 685, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 941, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 430, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 686, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 942, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 431, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 687, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 943, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 432, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 688, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 944, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 433, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 689, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 945, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 434, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 690, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 946, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:50:27.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 435, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 691, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 947, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 436, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 692, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 948, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 437, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 693, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 949, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 438, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 694, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 950, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 439, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 695, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 951, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 440, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 696, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 952, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 441, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:27.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 697, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 953, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 442, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 698, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 954, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 443, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 699, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 955, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 444, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 700, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 956, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 445, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 701, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 957, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 446, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 702, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 958, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:27.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 447, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:27.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 959, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
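The run of "Got update_stats" entries above, together with the "Got set_vbucket event ... Updated state" entries that surround them, repeats for every vbucket being moved, which makes the raw log hard to skim. Below is a minimal sketch of one way to summarize a log of this shape; it is not part of any Couchbase tooling, and the regexes simply match the entry text shown in this file.

```python
import re
import sys
from collections import Counter

# Regexes written against the exact entry formats visible above; other
# Couchbase versions may format these log lines differently.
UPDATE_STATS = re.compile(r"Got update_stats: (\d+),")
SET_VBUCKET = re.compile(r"Got set_vbucket event for ([^/]+)/(\d+)\.")


def summarize(path: str) -> None:
    update_stats_per_vb = Counter()   # vbucket id -> number of update_stats entries
    set_vbucket_per_vb = Counter()    # (bucket, vbucket id) -> number of set_vbucket events
    with open(path, errors="replace") as f:
        for line in f:
            for vb in UPDATE_STATS.findall(line):
                update_stats_per_vb[int(vb)] += 1
            for bucket, vb in SET_VBUCKET.findall(line):
                set_vbucket_per_vb[(bucket, int(vb))] += 1
    print(f"{len(update_stats_per_vb)} vbuckets with update_stats entries")
    print(f"{sum(set_vbucket_per_vb.values())} set_vbucket events across "
          f"{len(set_vbucket_per_vb)} bucket/vbucket pairs")


if __name__ == "__main__":
    summarize(sys.argv[1])
```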
[ns_server:debug,2014-08-19T16:50:27.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 703, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [views:debug,2014-08-19T16:50:27.995,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/495. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:27.995,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",495,active,0} [ns_server:debug,2014-08-19T16:50:28.070,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 493. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.070,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/493. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.070,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",493,active,0} [ns_server:debug,2014-08-19T16:50:28.072,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,776,721,593,410,282,955,827,644,516,150, 878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903, 775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745, 617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148, 876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901, 773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146, 874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899, 771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144, 872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897, 769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 
601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132, 988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599, 416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986, 858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572, 206,1011,934,806,751,623,440,312,985,857,674,546,180,908,780,725,597,414,286, 959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596, 230,830,519,464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880,569, 258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853, 542,176] [views:debug,2014-08-19T16:50:28.104,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/493. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.105,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",493,active,0} [ns_server:debug,2014-08-19T16:50:28.208,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 491. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.208,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/491. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.208,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",491,active,0} [ns_server:debug,2014-08-19T16:50:28.210,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,776,721,593,410,282,955,827,644,516,150, 878,695,567,384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903, 775,720,592,226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745, 617,434,306,979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148, 876,693,565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901, 773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743, 615,432,304,977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146, 874,691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899, 771,716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144, 872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897, 769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 
708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132, 988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599, 416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986, 858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572, 206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414, 286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907, 596,230,830,519,464,698,1009,932,621,310,855,544,178,778,723,412,957,646,880, 569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308, 853,542,176] [views:debug,2014-08-19T16:50:28.267,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/491. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",491,active,0} [ns_server:debug,2014-08-19T16:50:28.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 489. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.342,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/489. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",489,active,0} [ns_server:debug,2014-08-19T16:50:28.344,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,955,827,644,516,150,878,695,567, 384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592, 226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306, 979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565, 510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590, 224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563, 508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302, 975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561, 506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586, 220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300, 973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687, 559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218, 1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298, 971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996,868,685, 557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866,683, 555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603,420,292, 965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679, 551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576,210, 1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729,601,418,290, 963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677, 549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013, 936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599,416,288,961, 833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547, 492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934, 806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831, 648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830, 519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542, 176,776,721,410] [views:debug,2014-08-19T16:50:28.393,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/489. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.393,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",489,active,0} [ns_server:debug,2014-08-19T16:50:28.468,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 487. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.468,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/487. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.468,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",487,active,0} [ns_server:debug,2014-08-19T16:50:28.469,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,955,827,644,516,150,878,695,567, 384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592, 226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306, 979,851,668,540,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565, 510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590, 224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304, 977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563, 508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302, 975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561, 506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586, 220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300, 973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687, 559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218, 1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298, 971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996,868,685, 557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866,683, 555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864,681, 553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706,578,212, 1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603,420,292, 965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862,679, 551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576,210, 1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729,601,418,290, 963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860,677, 549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208,1013, 936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599,416,288,961, 833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675,547, 492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011,934, 
806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831, 648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230,830, 519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542, 487,176,776,721,410] [views:debug,2014-08-19T16:50:28.504,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/487. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.504,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",487,active,0} [ns_server:debug,2014-08-19T16:50:28.603,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 485. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.603,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/485. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.603,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",485,active,0} [ns_server:debug,2014-08-19T16:50:28.605,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,955,827,644,516,150,878,695,567, 384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592, 226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306, 979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718, 590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432, 304,977,849,666,538,172,900,772,717,589,406,278,951,823,640,512,146,874,691, 563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716, 588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430, 302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144,872,689, 561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714, 586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428, 300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142,998,870, 687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584, 218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426, 298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992,864, 681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706,578, 212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603,420, 292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990,862, 
679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704,576, 210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729,601,418, 290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988,860, 677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574,208, 1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599,416,288, 961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858,675, 547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206,1011, 934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959, 831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596,230, 830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569, 258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853, 542,487,176,776,721,410] [views:debug,2014-08-19T16:50:28.687,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/485. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.687,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",485,active,0} [ns_server:debug,2014-08-19T16:50:28.862,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 483. Nacking mccouch update. [views:debug,2014-08-19T16:50:28.862,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/483. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.863,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",483,active,0} [ns_server:debug,2014-08-19T16:50:28.864,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,955,827,644,516,150,878,695,567, 384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592, 226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306, 979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718, 590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432, 304,977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874, 691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771, 716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613, 430,302,975,847,664,536,170,898,770,715,587,404,276,949,821,766,638,144,872, 689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769, 714,586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611, 428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607, 
424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990, 862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704, 576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729,601, 418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988, 860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574, 208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599,416, 288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858, 675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206, 1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286, 959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596, 230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880, 569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308, 853,542,487,176,776,721,410] [views:debug,2014-08-19T16:50:28.930,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/483. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:28.930,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",483,active,0} [ns_server:debug,2014-08-19T16:50:29.096,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 481. Nacking mccouch update. [views:debug,2014-08-19T16:50:29.097,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/481. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.097,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",481,active,0} [ns_server:debug,2014-08-19T16:50:29.098,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,955,827,644,516,150,878,695,567, 384,256,1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592, 226,954,826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306, 979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693, 565,510,382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718, 590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432, 304,977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874, 691,563,508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771, 716,588,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613, 430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,144, 872,689,561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897, 769,714,586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739, 611,428,300,973,845,662,534,168,896,768,713,585,402,274,947,819,764,636,142, 998,870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895, 712,584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737, 609,426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132, 988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599, 416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986, 858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572, 206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414, 286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907, 596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646, 880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619, 308,853,542,487,176,776,721,410] [views:debug,2014-08-19T16:50:29.155,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/481. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.156,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",481,active,0} [ns_server:info,2014-08-19T16:50:29.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:50:29.330,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 479. Nacking mccouch update. [views:debug,2014-08-19T16:50:29.331,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/479. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.331,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",479,active,0} [ns_server:debug,2014-08-19T16:50:29.332,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954, 826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851, 668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510, 382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563, 508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689, 561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714, 586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428, 300,973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607, 424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134,990, 862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887,704, 576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729,601, 418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132,988, 860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702,574, 208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599,416, 
288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986,858, 675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572,206, 1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286, 959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907,596, 230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880, 569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308, 853,542,487,176,776,721,410,955,644] [views:debug,2014-08-19T16:50:29.389,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/479. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.390,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",479,active,0} [ns_server:debug,2014-08-19T16:50:29.564,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 477. Nacking mccouch update. [views:debug,2014-08-19T16:50:29.565,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/477. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.565,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",477,active,0} [ns_server:debug,2014-08-19T16:50:29.566,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954, 826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851, 668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510, 382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563, 508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689, 561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714, 586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428, 300,973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 
706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626,132, 988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885,702, 574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727,599, 416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130,986, 858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700,572, 206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414, 286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362,907, 596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646, 880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930,619, 308,853,542,487,176,776,721,410,955,644] [views:debug,2014-08-19T16:50:29.624,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/477. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",477,active,0} [ns_server:debug,2014-08-19T16:50:29.699,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 475. Nacking mccouch update. [views:debug,2014-08-19T16:50:29.699,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/475. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.699,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",475,active,0} [ns_server:debug,2014-08-19T16:50:29.701,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954, 826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851, 668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510, 382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563, 508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689, 561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714, 586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428, 300,973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140, 
996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788, 733,605,422,294,967,839,656,528,162,890,707,579,396,268,1018,941,813,758,630, 136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756,628, 134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784, 729,601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754,626, 132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727, 599,416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130, 986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700, 572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597, 414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362, 907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957, 646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930, 619,308,853,542,487,176,776,721,410,955,644] [views:debug,2014-08-19T16:50:29.733,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/475. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.733,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",475,active,0} [ns_server:debug,2014-08-19T16:50:29.896,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 473. Nacking mccouch update. [views:debug,2014-08-19T16:50:29.896,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/473. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.897,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",473,active,0} [ns_server:debug,2014-08-19T16:50:29.898,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954, 826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851, 668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510, 382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563, 508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689, 561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714, 586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428, 300,973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788, 733,605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758, 630,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,160,888,705,577,394,266,1016,939,811,756, 628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470, 342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912, 784,729,601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809,754, 626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340, 885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782, 727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624, 130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883, 700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725, 597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673, 362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412, 957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007, 930,619,308,853,542,487,176,776,721,410,955,644] [views:debug,2014-08-19T16:50:29.930,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/473. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:29.931,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",473,active,0} [ns_server:debug,2014-08-19T16:50:30.105,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 471. Nacking mccouch update. [views:debug,2014-08-19T16:50:30.105,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/471. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.106,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",471,active,0} [ns_server:debug,2014-08-19T16:50:30.107,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,878,695,567,384,256, 1006,929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954, 826,643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851, 668,540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510, 382,1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563, 508,380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588, 222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689, 561,506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714, 586,220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428, 300,973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788, 733,605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758, 630,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184, 912,784,729,601,418,290,963,835,652,524,158,886,703,575,392,264,1014,937,809, 754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752, 624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338, 
883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780, 725,597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984, 673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723, 412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696, 1007,930,619,308,853,542,487,176,776,721,410,955,644] [views:debug,2014-08-19T16:50:30.190,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/471. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.190,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",471,active,0} [ns_server:debug,2014-08-19T16:50:30.365,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 469. Nacking mccouch update. [views:debug,2014-08-19T16:50:30.366,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/469. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.366,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",469,active,0} [ns_server:debug,2014-08-19T16:50:30.367,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,695,384,1006,929,801, 746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515, 460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485, 174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563,508,380,1002, 925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664, 536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561,506,378, 1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948, 820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845, 662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996,868,685, 557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,134, 
990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754,626, 132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727, 599,416,288,961,833,650,522,156,884,701,573,390,262,1012,935,807,752,624,130, 986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883,700, 572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597, 414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673,362, 907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957, 646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007,930, 619,308,853,542,487,176,776,721,410,955,644,878,567,256] [views:debug,2014-08-19T16:50:30.423,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/469. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",469,active,0} [ns_server:debug,2014-08-19T16:50:30.598,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 467. Nacking mccouch update. [views:debug,2014-08-19T16:50:30.599,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/467. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.599,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",467,active,0} [ns_server:debug,2014-08-19T16:50:30.600,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,695,384,1006,929,801, 746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515, 460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485, 174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563,508,380,1002, 925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664, 536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561,506,378, 1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948, 820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845, 662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996,868,685, 557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 
969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754,626, 132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727, 599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807,752,624, 130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883, 700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725, 597,414,286,959,831,648,520,154,882,699,571,388,260,1010,805,750,128,984,673, 362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412, 957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696,1007, 930,619,308,853,542,487,176,776,721,410,955,644,878,567,256] [views:debug,2014-08-19T16:50:30.657,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/467. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.658,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",467,active,0} [ns_server:debug,2014-08-19T16:50:30.824,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 465. Nacking mccouch update. [views:debug,2014-08-19T16:50:30.824,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/465. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.824,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",465,active,0} [ns_server:debug,2014-08-19T16:50:30.826,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,152,697,386,1008,931,620,254,854,543,488,777,722,956,645,334, 879,568,202,802,747,436,981,670,904,593,282,827,516,150,695,384,1006,929,801, 746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515, 460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485, 174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563,508,380,1002, 925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664, 536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561,506,378, 1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948, 820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845, 662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996,868,685, 557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754,626, 132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727, 599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807,752,624, 130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883, 700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725, 597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,128,984, 673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723, 412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696, 1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256] [views:debug,2014-08-19T16:50:30.883,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/465. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.883,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",465,active,0} [ns_server:debug,2014-08-19T16:50:30.983,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 463. Nacking mccouch update. [views:debug,2014-08-19T16:50:30.984,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/463. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:30.984,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",463,active,0} [ns_server:debug,2014-08-19T16:50:30.985,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,150,695,384,1006,929, 801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826,643, 515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540, 485,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382,1004, 927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824, 641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666, 538,483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563,508,380, 1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847, 664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561,506, 378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220, 948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973, 845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687, 559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218, 1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298, 971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628, 134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784, 729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754, 626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340, 885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782, 727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807,752, 624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338, 
883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780, 725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,128, 984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778, 723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462, 696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256] [views:debug,2014-08-19T16:50:31.042,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/463. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.043,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",463,active,0} [ns_server:debug,2014-08-19T16:50:31.142,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 461. Nacking mccouch update. [views:debug,2014-08-19T16:50:31.142,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/461. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.143,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",461,active,0} [ns_server:debug,2014-08-19T16:50:31.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,801,746,618,252,124,980,852,669,541,486,358,903,775,720,592,226,954,826, 643,515,460,332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668, 540,485,174,902,774,719,591,408,280,953,825,642,514,148,876,693,565,510,382, 1004,927,799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952, 824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849, 666,538,483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563,508, 380,1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222, 950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975, 847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561, 506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586, 220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300, 973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870, 687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584, 218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426, 298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607, 424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 
731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470, 342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912, 784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809, 754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807, 752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466, 338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908, 780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750, 128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178, 778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517, 462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256] [views:debug,2014-08-19T16:50:31.202,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/461. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.202,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",461,active,0} [ns_server:debug,2014-08-19T16:50:31.301,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 459. Nacking mccouch update. [views:debug,2014-08-19T16:50:31.302,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/459. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.302,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",459,active,0} [ns_server:debug,2014-08-19T16:50:31.303,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515,460, 332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174, 902,774,719,591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,146,874,691,563,508,380,1002, 925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822, 767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664, 536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561,506,378, 1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948, 820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845, 662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996,868,685, 
557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,134, 990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342,887, 704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784,729, 601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754,626, 132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340,885, 702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782,727, 599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807,752,624, 130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338,883, 700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780,725, 597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,128,984, 673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778,723, 412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462,696, 1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,124] [views:debug,2014-08-19T16:50:31.336,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/459. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.336,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",459,active,0} [ns_server:debug,2014-08-19T16:50:31.429,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 457. Nacking mccouch update. [views:debug,2014-08-19T16:50:31.429,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/457. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.429,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",457,active,0} [ns_server:debug,2014-08-19T16:50:31.431,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515,460, 332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174, 902,774,719,591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380, 1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847, 664,536,481,170,898,770,715,587,404,276,949,821,766,638,144,872,689,561,506, 378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220, 948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973, 845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870,687, 559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218, 1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298, 971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628, 134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784, 729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754, 626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468,340, 885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910,782, 727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807,752, 624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466,338, 883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908,780, 725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,128, 984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178,778, 723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517,462, 696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746, 124] [views:debug,2014-08-19T16:50:31.488,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for 
default/457. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.488,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",457,active,0} [ns_server:debug,2014-08-19T16:50:31.591,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 455. Nacking mccouch update. [views:debug,2014-08-19T16:50:31.592,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/455. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.592,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",455,active,0} [ns_server:debug,2014-08-19T16:50:31.593,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515,460, 332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174, 902,774,719,591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380, 1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847, 664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586, 220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300, 973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,142,998,870, 687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584, 218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426, 298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140,996, 868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607, 424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470, 342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912, 784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809, 754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807, 
752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466, 338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908, 780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750, 128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178, 778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517, 462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801, 746,124] [views:debug,2014-08-19T16:50:31.642,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/455. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.643,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",455,active,0} [ns_server:debug,2014-08-19T16:50:31.734,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 453. Nacking mccouch update. [views:debug,2014-08-19T16:50:31.734,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/453. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.735,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",453,active,0} [ns_server:debug,2014-08-19T16:50:31.736,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515,460, 332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174, 902,774,719,591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380, 1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847, 664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586, 220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300, 973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,453,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,140, 996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893, 710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735, 607,424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632, 138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788, 733,605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758, 630,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 
344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184, 912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937, 809,754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523, 468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182, 910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935, 807,752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180, 908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805, 750,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489, 178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828, 517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256, 801,746,124] [views:debug,2014-08-19T16:50:31.785,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/453. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.785,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",453,active,0} [ns_server:debug,2014-08-19T16:50:31.943,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 451. Nacking mccouch update. [views:debug,2014-08-19T16:50:31.943,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/451. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:31.943,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",451,active,0} [ns_server:debug,2014-08-19T16:50:31.945,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,980,852,669,541,486,358,903,775,720,592,226,954,826,643,515,460, 332,877,694,566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174, 902,774,719,591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927, 799,744,616,250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641, 513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380, 1002,925,797,742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950, 822,767,639,456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847, 664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586, 220,948,820,765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300, 973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636,453,142,998, 870,687,559,504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712, 584,218,1023,946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609, 426,298,971,843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451, 140,996,868,685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348, 893,710,582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790, 735,607,424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760, 632,138,994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474, 346,891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916, 788,733,605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813, 758,630,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527, 472,344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651, 523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493, 182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012, 935,807,752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649, 521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491, 180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010, 805,750,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544, 489,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228, 828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567, 256,801,746,124] [rebalance:info,2014-08-19T16:50:31.948,ns_1@10.242.238.88:<0.6021.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 442 state change: 
{'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:31.948,ns_1@10.242.238.88:<0.5938.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 443 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:31.949,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 442 state to active [ns_server:info,2014-08-19T16:50:31.951,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 443 state to active [rebalance:info,2014-08-19T16:50:31.950,ns_1@10.242.238.88:<0.6021.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 442 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:31.952,ns_1@10.242.238.88:<0.5938.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 443 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:31.952,ns_1@10.242.238.88:<0.6021.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:31.953,ns_1@10.242.238.88:<0.5938.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:32.010,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/451. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.010,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",451,active,0} [rebalance:info,2014-08-19T16:50:32.035,ns_1@10.242.238.88:<0.5853.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 444 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.035,ns_1@10.242.238.88:<0.5789.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 445 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.035,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 444 state to active [rebalance:info,2014-08-19T16:50:32.037,ns_1@10.242.238.88:<0.5853.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 444 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.037,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 445 state to active [rebalance:info,2014-08-19T16:50:32.038,ns_1@10.242.238.88:<0.5789.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 445 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.039,ns_1@10.242.238.88:<0.5853.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.039,ns_1@10.242.238.88:<0.5789.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.142,ns_1@10.242.238.88:<0.5625.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 447 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.142,ns_1@10.242.238.88:<0.5702.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 446 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.142,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 447 state to 
active [rebalance:info,2014-08-19T16:50:32.143,ns_1@10.242.238.88:<0.5625.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 447 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.143,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 446 state to active [rebalance:info,2014-08-19T16:50:32.144,ns_1@10.242.238.88:<0.5702.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 446 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.145,ns_1@10.242.238.88:<0.5625.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.145,ns_1@10.242.238.88:<0.5702.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:32.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 449. Nacking mccouch update. [views:debug,2014-08-19T16:50:32.170,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/449. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",449,active,0} [ns_server:debug,2014-08-19T16:50:32.172,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,903,775,720,592,226,954,826,643,515,460,332,877,694, 566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719, 591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616, 250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330, 875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900, 772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797, 742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820, 765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662, 534,479,168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344, 
889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470, 342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912, 784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809, 754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807, 752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521,466, 338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180,908, 780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750, 128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489,178, 778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828,517, 462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801, 746,124,980,669,358] [views:debug,2014-08-19T16:50:32.229,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/449. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.229,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",449,active,0} [rebalance:info,2014-08-19T16:50:32.244,ns_1@10.242.238.88:<0.6154.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 696 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.244,ns_1@10.242.238.88:<0.6063.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 697 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.244,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 696 state to active [rebalance:info,2014-08-19T16:50:32.245,ns_1@10.242.238.88:<0.6154.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 696 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.245,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 697 state to active [rebalance:info,2014-08-19T16:50:32.246,ns_1@10.242.238.88:<0.6063.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 697 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.246,ns_1@10.242.238.88:<0.6154.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.247,ns_1@10.242.238.88:<0.6063.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.328,ns_1@10.242.238.88:<0.5909.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 699 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.328,ns_1@10.242.238.88:<0.5986.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 698 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.328,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 699 state to active [rebalance:info,2014-08-19T16:50:32.329,ns_1@10.242.238.88:<0.5909.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing 
get_replication_persistence_checkpoint_id call for vbucket 699 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.329,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 698 state to active [rebalance:info,2014-08-19T16:50:32.330,ns_1@10.242.238.88:<0.5986.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 698 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.331,ns_1@10.242.238.88:<0.5909.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.331,ns_1@10.242.238.88:<0.5986.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:32.387,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 447. Nacking mccouch update. [views:debug,2014-08-19T16:50:32.387,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/447. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.387,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",447,active,0} [ns_server:debug,2014-08-19T16:50:32.389,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,903,775,720,592,226,954,826,643,515,460,332,877,694, 566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719, 591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616, 250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330, 875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900, 772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797, 742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820, 765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662, 534,479,168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 
786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184, 912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937, 809,754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523, 468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182, 910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935, 807,752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180, 908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805, 750,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544,489, 178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228,828, 517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256, 801,746,124,980,669,358] [rebalance:info,2014-08-19T16:50:32.428,ns_1@10.242.238.88:<0.5832.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 700 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.428,ns_1@10.242.238.88:<0.5754.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 701 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.429,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 700 state to active [rebalance:info,2014-08-19T16:50:32.430,ns_1@10.242.238.88:<0.5832.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 700 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.430,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 701 state to active [rebalance:info,2014-08-19T16:50:32.431,ns_1@10.242.238.88:<0.5754.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 701 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.431,ns_1@10.242.238.88:<0.5832.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.431,ns_1@10.242.238.88:<0.5754.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:32.463,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/447. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.463,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",447,active,0} [rebalance:info,2014-08-19T16:50:32.537,ns_1@10.242.238.88:<0.5564.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 703 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.537,ns_1@10.242.238.88:<0.5667.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 702 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.538,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 703 state to active [rebalance:info,2014-08-19T16:50:32.539,ns_1@10.242.238.88:<0.5564.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 703 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.539,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 702 state to active [rebalance:info,2014-08-19T16:50:32.540,ns_1@10.242.238.88:<0.5667.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 702 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.540,ns_1@10.242.238.88:<0.5564.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.540,ns_1@10.242.238.88:<0.5667.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:32.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 445. Nacking mccouch update. [views:debug,2014-08-19T16:50:32.630,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/445. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",445,active,0} [ns_server:debug,2014-08-19T16:50:32.632,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,903,775,720,592,226,954,826,643,515,460,332,877,694, 566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719, 591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616, 250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330, 875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900, 772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797, 742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820, 765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662, 534,479,168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651, 523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493, 182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012, 935,807,752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649, 521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491, 180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010, 805,750,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544, 489,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228, 828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567, 256,801,746,124,980,669,358] [rebalance:info,2014-08-19T16:50:32.646,ns_1@10.242.238.88:<0.6127.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 952 state change: 
{'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.646,ns_1@10.242.238.88:<0.6042.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 953 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.647,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 952 state to active [rebalance:info,2014-08-19T16:50:32.648,ns_1@10.242.238.88:<0.6127.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 952 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.648,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 953 state to active [rebalance:info,2014-08-19T16:50:32.649,ns_1@10.242.238.88:<0.6042.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 953 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.649,ns_1@10.242.238.88:<0.6127.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.650,ns_1@10.242.238.88:<0.6042.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:32.714,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/445. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.714,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",445,active,0} [rebalance:info,2014-08-19T16:50:32.763,ns_1@10.242.238.88:<0.5965.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 954 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.764,ns_1@10.242.238.88:<0.5888.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 955 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.764,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 954 state to active [rebalance:info,2014-08-19T16:50:32.765,ns_1@10.242.238.88:<0.5965.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 954 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.765,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 955 state to active [rebalance:info,2014-08-19T16:50:32.766,ns_1@10.242.238.88:<0.5888.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 955 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.766,ns_1@10.242.238.88:<0.5965.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.767,ns_1@10.242.238.88:<0.5888.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:32.789,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 443. Nacking mccouch update. [views:debug,2014-08-19T16:50:32.789,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/443. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.789,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",443,active,0} [ns_server:debug,2014-08-19T16:50:32.791,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,903,775,720,592,226,954,826,643,515,460,332,877,694, 566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719, 591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616, 250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330, 875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900, 772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797, 742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820, 765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662, 534,479,168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834, 651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548, 493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262, 1012,935,807,752,624,130,986,858,675,547,492,364,909,781,726,598,232,960,832, 649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546, 491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260, 1010,805,750,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310, 855,544,489,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905, 594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644, 878,567,256,801,746,124,980,669,358] [views:debug,2014-08-19T16:50:32.823,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got 
set_vbucket event for default/443. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.824,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",443,active,0} [rebalance:info,2014-08-19T16:50:32.880,ns_1@10.242.238.88:<0.5810.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 956 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.880,ns_1@10.242.238.88:<0.5731.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 957 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.880,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 956 state to active [rebalance:info,2014-08-19T16:50:32.881,ns_1@10.242.238.88:<0.5810.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 956 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.882,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 957 state to active [rebalance:info,2014-08-19T16:50:32.883,ns_1@10.242.238.88:<0.5731.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 957 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.883,ns_1@10.242.238.88:<0.5810.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.883,ns_1@10.242.238.88:<0.5731.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:32.923,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 441. Nacking mccouch update. [views:debug,2014-08-19T16:50:32.923,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/441. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.923,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",441,active,0} [ns_server:debug,2014-08-19T16:50:32.925,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,903,775,720,592,226,954,826,643,515,460,332,877,694, 566,511,200,1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719, 591,408,280,953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616, 250,122,978,850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330, 875,692,564,509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900, 772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797, 742,614,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639, 456,328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820, 765,637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662, 534,479,168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559, 504,376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868, 685,557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138, 994,866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834, 651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548, 493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262, 1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960, 832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674, 546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388, 260,1010,805,750,128,984,673,362,907,596,230,830,519,464,698,1009,932,621, 310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360, 905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955, 644,878,567,256,801,746,124,980,669,358] [ns_server:debug,2014-08-19T16:50:32.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting 
docs_left_updater_loop:"default" [{move_state,938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_938_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_938_'ns_1@10.242.238.90'">>}]}, {move_state,427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_427_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_427_'ns_1@10.242.238.91'">>}]}, {move_state,683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_683_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_683_'ns_1@10.242.238.91'">>}]}, {move_state,939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_939_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_939_'ns_1@10.242.238.90'">>}]}, {move_state,428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_428_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_428_'ns_1@10.242.238.91'">>}]}, {move_state,684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_684_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_684_'ns_1@10.242.238.91'">>}]}, {move_state,940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_940_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_940_'ns_1@10.242.238.90'">>}]}, {move_state,429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_429_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_429_'ns_1@10.242.238.91'">>}]}, {move_state,685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_685_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_685_'ns_1@10.242.238.91'">>}]}, {move_state,941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_941_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_941_'ns_1@10.242.238.90'">>}]}, {move_state,430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_430_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_430_'ns_1@10.242.238.91'">>}]}, {move_state,686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_686_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_686_'ns_1@10.242.238.91'">>}]}, {move_state,942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_942_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_942_'ns_1@10.242.238.90'">>}]}, {move_state,431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_431_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_431_'ns_1@10.242.238.91'">>}]}, {move_state,687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_687_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_687_'ns_1@10.242.238.91'">>}]}, {move_state,943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_943_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_943_'ns_1@10.242.238.90'">>}]}, {move_state,432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_432_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_432_'ns_1@10.242.238.91'">>}]}, {move_state,688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_688_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_688_'ns_1@10.242.238.91'">>}]}, {move_state,944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_944_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_944_'ns_1@10.242.238.90'">>}]}, {move_state,433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_433_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_433_'ns_1@10.242.238.91'">>}]}, {move_state,689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_689_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_689_'ns_1@10.242.238.91'">>}]}, {move_state,945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_945_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_945_'ns_1@10.242.238.90'">>}]}, {move_state,434, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_434_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_434_'ns_1@10.242.238.91'">>}]}, {move_state,690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_690_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_690_'ns_1@10.242.238.91'">>}]}, {move_state,946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_946_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_946_'ns_1@10.242.238.90'">>}]}, {move_state,435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_435_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_435_'ns_1@10.242.238.91'">>}]}, {move_state,691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_691_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_691_'ns_1@10.242.238.91'">>}]}, {move_state,947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_947_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_947_'ns_1@10.242.238.90'">>}]}, {move_state,436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_436_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_436_'ns_1@10.242.238.91'">>}]}, {move_state,692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_692_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_692_'ns_1@10.242.238.91'">>}]}, {move_state,948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_948_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_948_'ns_1@10.242.238.90'">>}]}, {move_state,437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_437_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_437_'ns_1@10.242.238.91'">>}]}, {move_state,693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_693_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_693_'ns_1@10.242.238.91'">>}]}, {move_state,949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_949_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_949_'ns_1@10.242.238.90'">>}]}, {move_state,438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_438_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_438_'ns_1@10.242.238.91'">>}]}, {move_state,694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_694_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_694_'ns_1@10.242.238.91'">>}]}, {move_state,950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_950_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_950_'ns_1@10.242.238.90'">>}]}, {move_state,439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_439_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_439_'ns_1@10.242.238.91'">>}]}, {move_state,695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_695_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_695_'ns_1@10.242.238.91'">>}]}, {move_state,951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_951_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_951_'ns_1@10.242.238.90'">>}]}, {move_state,440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_440_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_440_'ns_1@10.242.238.91'">>}]}, {move_state,696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_696_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_696_'ns_1@10.242.238.91'">>}]}, {move_state,952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_952_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_952_'ns_1@10.242.238.90'">>}]}, {move_state,441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_441_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_441_'ns_1@10.242.238.91'">>}]}, {move_state,697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_697_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_697_'ns_1@10.242.238.91'">>}]}, {move_state,953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_953_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_953_'ns_1@10.242.238.90'">>}]}, {move_state,442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_442_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_442_'ns_1@10.242.238.91'">>}]}, {move_state,698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_698_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_698_'ns_1@10.242.238.91'">>}]}, {move_state,954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_954_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_954_'ns_1@10.242.238.90'">>}]}, {move_state,443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_443_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_443_'ns_1@10.242.238.91'">>}]}, {move_state,699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_699_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_699_'ns_1@10.242.238.91'">>}]}, {move_state,955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_955_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_955_'ns_1@10.242.238.90'">>}]}, {move_state,444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_444_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_444_'ns_1@10.242.238.91'">>}]}, {move_state,700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_700_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_700_'ns_1@10.242.238.91'">>}]}, {move_state,956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_956_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_956_'ns_1@10.242.238.90'">>}]}, {move_state,445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_445_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_445_'ns_1@10.242.238.91'">>}]}, {move_state,701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_701_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_701_'ns_1@10.242.238.91'">>}]}, {move_state,957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_957_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_957_'ns_1@10.242.238.90'">>}]}, {move_state,446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_446_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_446_'ns_1@10.242.238.91'">>}]}, {move_state,702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_702_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_702_'ns_1@10.242.238.91'">>}]}, {move_state,958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_958_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_958_'ns_1@10.242.238.90'">>}]}, {move_state,447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_447_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_447_'ns_1@10.242.238.91'">>}]}, {move_state,959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_959_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_959_'ns_1@10.242.238.90'">>}]}, {move_state,703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_703_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_703_'ns_1@10.242.238.91'">>}]}] [ns_server:debug,2014-08-19T16:50:32.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 938, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 427, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 683, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 939, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 428, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 684, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 940, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 429, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:32.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 685, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 941, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 430, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 686, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 942, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 431, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 687, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 943, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 432, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 688, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 944, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 433, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 689, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 945, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 434, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 690, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 946, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 435, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 691, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:32.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 947, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 436, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 692, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 948, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 437, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 693, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 949, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 438, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 694, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 950, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 439, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 695, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 951, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 440, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 696, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 952, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 441, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 697, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 953, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] 
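The move_state tuples earlier in this dump describe one pending vbucket move each, in the shape {move_state, VBucket, OldChain, NewChain, BuildStats}, and the "Got update_stats" casts that follow carry a vbucket id plus per-node counters for the replicas being built (all still 0 at this point). A minimal sketch of how such records could be kept current is below; the module and function names are hypothetical and the meaning of the two numeric fields in replica_building_stats is an assumption, this is not the actual ns_rebalance_observer code.

%% Sketch only: hypothetical bookkeeping for move_state / update_stats terms.
-module(move_stats_sketch).
-export([new/1, update_stats/3, left_for_vbucket/2]).

%% BuildStats entries look like
%% {replica_building_stats, Node, Left, Total, TapName}; Left/Total semantics
%% are assumed here.
new(MoveStates) ->
    dict:from_list([{VB, Move}
                    || {move_state, VB, _Old, _New, _Stats} = Move <- MoveStates]).

%% Apply one "Got update_stats: VB, [{Node, Count}, ...]" message.
update_stats(VB, NodeCounts, Dict) ->
    dict:update(VB,
                fun({move_state, V, Old, New, Stats}) ->
                        {move_state, V, Old, New,
                         [{replica_building_stats, Node,
                           proplists:get_value(Node, NodeCounts, Left0),
                           Total, Tap}
                          || {replica_building_stats, Node, Left0, Total, Tap} <- Stats]}
                end,
                Dict).

%% Sum of the per-replica counters still outstanding for one vbucket.
left_for_vbucket(VB, Dict) ->
    {move_state, _, _, _, Stats} = dict:fetch(VB, Dict),
    lists:sum([Left || {replica_building_stats, _Node, Left, _Total, _Tap} <- Stats]).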
[ns_server:debug,2014-08-19T16:50:32.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 442, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 698, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 954, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 443, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 699, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 955, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 444, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 700, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 956, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 445, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 701, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 957, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 446, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 702, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 958, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 447, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:32.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 959, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:32.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 703, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [views:debug,2014-08-19T16:50:32.982,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/441. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:32.982,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",441,active,0} [rebalance:info,2014-08-19T16:50:32.990,ns_1@10.242.238.88:<0.5585.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 959 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:32.990,ns_1@10.242.238.88:<0.5646.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 958 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:32.991,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 959 state to active [rebalance:info,2014-08-19T16:50:32.992,ns_1@10.242.238.88:<0.5585.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 959 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:32.992,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 958 state to active [rebalance:info,2014-08-19T16:50:32.993,ns_1@10.242.238.88:<0.5646.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 958 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:32.993,ns_1@10.242.238.88:<0.5585.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:32.994,ns_1@10.242.238.88:<0.5646.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.101,ns_1@10.242.238.88:<0.7232.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 427 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.101,ns_1@10.242.238.88:<0.7155.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 428 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.101,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 427 state to active [rebalance:info,2014-08-19T16:50:33.102,ns_1@10.242.238.88:<0.7232.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 427 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.102,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 428 state to active [rebalance:info,2014-08-19T16:50:33.103,ns_1@10.242.238.88:<0.7155.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 428 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.103,ns_1@10.242.238.88:<0.7232.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.104,ns_1@10.242.238.88:<0.7155.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:33.107,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 439. Nacking mccouch update. [views:debug,2014-08-19T16:50:33.107,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/439. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.107,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",439,active,0} [ns_server:debug,2014-08-19T16:50:33.109,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,826,643,515,460,332,877,694,566,511,200, 1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280, 953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564, 509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589, 406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873, 690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770, 715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740, 612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763, 635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532, 477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502, 374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,445, 134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784, 729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754, 626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807, 752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180, 908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544, 489,178,778,723,412,957,646,880,569,258,803,748,126,982,671,360,905,594,228, 828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567, 256,801,746,124,980,669,358,903,592,226] [rebalance:info,2014-08-19T16:50:33.166,ns_1@10.242.238.88:<0.7001.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 430 
state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.166,ns_1@10.242.238.88:<0.7078.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 429 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.167,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 430 state to active [rebalance:info,2014-08-19T16:50:33.168,ns_1@10.242.238.88:<0.7001.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 430 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.168,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 429 state to active [rebalance:info,2014-08-19T16:50:33.169,ns_1@10.242.238.88:<0.7078.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 429 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.169,ns_1@10.242.238.88:<0.7001.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.169,ns_1@10.242.238.88:<0.7078.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:33.179,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/439. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.180,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",439,active,0} [rebalance:info,2014-08-19T16:50:33.250,ns_1@10.242.238.88:<0.6920.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 431 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.250,ns_1@10.242.238.88:<0.6843.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 432 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.250,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 431 state to active [rebalance:info,2014-08-19T16:50:33.251,ns_1@10.242.238.88:<0.6920.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 431 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.252,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 432 state to active [rebalance:info,2014-08-19T16:50:33.252,ns_1@10.242.238.88:<0.6843.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 432 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.253,ns_1@10.242.238.88:<0.6920.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.253,ns_1@10.242.238.88:<0.6843.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.318,ns_1@10.242.238.88:<0.6689.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 434 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.318,ns_1@10.242.238.88:<0.6766.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 433 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.318,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 434 
state to active [rebalance:info,2014-08-19T16:50:33.319,ns_1@10.242.238.88:<0.6689.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 434 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.320,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 433 state to active [rebalance:info,2014-08-19T16:50:33.321,ns_1@10.242.238.88:<0.6766.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 433 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.321,ns_1@10.242.238.88:<0.6689.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.321,ns_1@10.242.238.88:<0.6766.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:33.355,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 437. Nacking mccouch update. [views:debug,2014-08-19T16:50:33.355,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/437. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.355,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",437,active,0} [ns_server:debug,2014-08-19T16:50:33.357,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,826,643,515,460,332,877,694,566,511,200, 1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280, 953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564, 509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589, 406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873, 690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770, 715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740, 612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763, 635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532, 477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502, 374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 
864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,445, 134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784, 729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754, 626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807, 752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180, 908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544, 489,178,778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594, 228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878, 567,256,801,746,124,980,669,358,903,592,226] [rebalance:info,2014-08-19T16:50:33.384,ns_1@10.242.238.88:<0.6515.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 436 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.385,ns_1@10.242.238.88:<0.6597.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 435 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.385,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 436 state to active [rebalance:info,2014-08-19T16:50:33.386,ns_1@10.242.238.88:<0.6515.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 436 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.386,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 435 state to active [rebalance:info,2014-08-19T16:50:33.387,ns_1@10.242.238.88:<0.6597.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 435 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.387,ns_1@10.242.238.88:<0.6515.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.388,ns_1@10.242.238.88:<0.6597.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:33.414,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/437. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.414,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",437,active,0} [rebalance:info,2014-08-19T16:50:33.452,ns_1@10.242.238.88:<0.6443.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 437 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.452,ns_1@10.242.238.88:<0.6329.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 438 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.452,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 437 state to active [rebalance:info,2014-08-19T16:50:33.453,ns_1@10.242.238.88:<0.6443.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 437 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.453,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 438 state to active [rebalance:info,2014-08-19T16:50:33.454,ns_1@10.242.238.88:<0.6329.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 438 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.455,ns_1@10.242.238.88:<0.6443.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.455,ns_1@10.242.238.88:<0.6329.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.572,ns_1@10.242.238.88:<0.6266.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 439 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.572,ns_1@10.242.238.88:<0.8246.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 440) [ns_server:info,2014-08-19T16:50:33.572,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 439 state to active [rebalance:info,2014-08-19T16:50:33.573,ns_1@10.242.238.88:<0.6189.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:33.573,ns_1@10.242.238.88:<0.6266.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 439 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.574,ns_1@10.242.238.88:<0.6266.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:33.577,ns_1@10.242.238.88:<0.6197.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_440_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:33.577,ns_1@10.242.238.88:<0.6189.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:33.579,ns_1@10.242.238.88:<0.6189.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 440 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.8253.1> [ns_server:info,2014-08-19T16:50:33.580,ns_1@10.242.238.88:<0.8253.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 440 to state replica [ns_server:debug,2014-08-19T16:50:33.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 435. Nacking mccouch update. [views:debug,2014-08-19T16:50:33.589,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/435. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",435,active,0} [ns_server:debug,2014-08-19T16:50:33.591,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,826,643,515,460,332,877,694,566,511,200, 1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280, 953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616,250,122,978, 850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564, 509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589, 406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,248, 120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873, 690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770, 715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740, 612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763, 635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532, 477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502, 374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 
555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,445, 134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784, 729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754, 626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807, 752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180, 908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544, 489,178,778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594, 228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878, 567,256,801,746,435,124,980,669,358,903,592,226] [ns_server:debug,2014-08-19T16:50:33.615,ns_1@10.242.238.88:<0.8253.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_440 [rebalance:info,2014-08-19T16:50:33.617,ns_1@10.242.238.88:<0.8253.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[440]}, {checkpoints,[{440,1}]}, {name,<<"rebalance_440">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[440]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"440"}]} [rebalance:debug,2014-08-19T16:50:33.618,ns_1@10.242.238.88:<0.8253.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8254.1> [rebalance:info,2014-08-19T16:50:33.619,ns_1@10.242.238.88:<0.8253.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:33.620,ns_1@10.242.238.88:<0.8253.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:33.620,ns_1@10.242.238.88:<0.8253.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:33.621,ns_1@10.242.238.88:<0.6189.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 440 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:33.623,ns_1@10.242.238.88:<0.6197.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:33.628,ns_1@10.242.238.88:<0.6197.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_440_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:33.628,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 440 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:33.628,ns_1@10.242.238.88:<0.8258.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 440 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} 
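The entries just above show the tail of a single move for vbucket 440: the mover finishes waiting for index updates, shuts down the replicator into 'ns_1@10.242.238.89' and kills the replication_building_440_* TAP names, sets vbucket 440 to replica on the destination, runs a takeover TAP stream (rebalance_440), and then flips the chain so 'ns_1@10.242.238.89' becomes active and 'ns_1@10.242.238.91' becomes a replica pointing at it. A minimal sketch of just that final state computation follows; it is a hypothetical helper, not the actual ns_single_vbucket_mover code.

%% Sketch only: compute the janitor_agent state changes applied after takeover.
-module(takeover_sketch).
-export([post_takeover_states/2]).

%% NewChain is [NewMaster | NewReplicas],
%% e.g. ['ns_1@10.242.238.89','ns_1@10.242.238.91'].
post_takeover_states(VBucket, [NewMaster | NewReplicas]) ->
    MasterChange = {VBucket, {NewMaster, active, undefined, undefined}},
    ReplicaChanges = [{VBucket, {R, replica, undefined, NewMaster}}
                      || R <- NewReplicas, R =/= undefined],
    [MasterChange | ReplicaChanges].

Calling post_takeover_states(440, ['ns_1@10.242.238.89','ns_1@10.242.238.91']) yields exactly the two "Doing vbucket 440 state change" / bulk state change tuples logged above.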
[rebalance:info,2014-08-19T16:50:33.645,ns_1@10.242.238.88:<0.8259.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 442) [rebalance:info,2014-08-19T16:50:33.645,ns_1@10.242.238.88:<0.6098.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 441 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.645,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 441 state to active [rebalance:info,2014-08-19T16:50:33.646,ns_1@10.242.238.88:<0.6021.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:50:33.647,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.647,ns_1@10.242.238.88:<0.6098.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 441 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:33.647,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{440, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:33.648,ns_1@10.242.238.88:<0.6098.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:33.648,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:33.648,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:33.649,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/435. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",435,active,0} [ns_server:debug,2014-08-19T16:50:33.649,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:33.652,ns_1@10.242.238.88:<0.6029.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_442_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:33.652,ns_1@10.242.238.88:<0.6021.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:33.656,ns_1@10.242.238.88:<0.6021.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 442 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.8274.1> [ns_server:info,2014-08-19T16:50:33.657,ns_1@10.242.238.88:<0.8274.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 442 to state replica [rebalance:info,2014-08-19T16:50:33.664,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 440 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.8276.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 446) [rebalance:info,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.8278.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 445) [rebalance:info,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.8277.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 447) [rebalance:info,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.8280.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 443) [rebalance:info,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.8279.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 444) [rebalance:info,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.7134.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 684 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 440) [ns_server:info,2014-08-19T16:50:33.665,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 684 state to active [rebalance:info,2014-08-19T16:50:33.666,ns_1@10.242.238.88:<0.5702.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:33.666,ns_1@10.242.238.88:<0.5789.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:33.666,ns_1@10.242.238.88:<0.5625.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:50:33.666,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:33.666,ns_1@10.242.238.88:<0.5938.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:33.666,ns_1@10.242.238.88:<0.5853.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:33.667,ns_1@10.242.238.88:<0.7134.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 684 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.667,ns_1@10.242.238.88:<0.7134.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:33.671,ns_1@10.242.238.88:<0.5633.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_447_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:33.672,ns_1@10.242.238.88:<0.5625.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:33.672,ns_1@10.242.238.88:<0.5710.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_446_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:33.672,ns_1@10.242.238.88:<0.5702.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:33.672,ns_1@10.242.238.88:<0.5797.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_445_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:33.672,ns_1@10.242.238.88:<0.5952.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_443_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:33.672,ns_1@10.242.238.88:<0.5789.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:33.673,ns_1@10.242.238.88:<0.5938.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:33.673,ns_1@10.242.238.88:<0.5861.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_444_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:33.673,ns_1@10.242.238.88:<0.5853.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:33.678,ns_1@10.242.238.88:<0.5625.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 447 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.8295.1> [ns_server:info,2014-08-19T16:50:33.680,ns_1@10.242.238.88:<0.8295.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 447 to state replica [ns_server:debug,2014-08-19T16:50:33.680,ns_1@10.242.238.88:<0.5702.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 446 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.8296.1> [ns_server:debug,2014-08-19T16:50:33.680,ns_1@10.242.238.88:<0.5938.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 443 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.8297.1> [ns_server:debug,2014-08-19T16:50:33.681,ns_1@10.242.238.88:<0.5789.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 445 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.8298.1> 
[ns_server:info,2014-08-19T16:50:33.681,ns_1@10.242.238.88:<0.8297.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 443 to state replica [ns_server:info,2014-08-19T16:50:33.681,ns_1@10.242.238.88:<0.8296.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 446 to state replica [ns_server:debug,2014-08-19T16:50:33.681,ns_1@10.242.238.88:<0.5853.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 444 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.8299.1> [ns_server:info,2014-08-19T16:50:33.681,ns_1@10.242.238.88:<0.8298.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 445 to state replica [ns_server:info,2014-08-19T16:50:33.682,ns_1@10.242.238.88:<0.8299.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 444 to state replica [ns_server:debug,2014-08-19T16:50:33.691,ns_1@10.242.238.88:<0.8274.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_442 [rebalance:info,2014-08-19T16:50:33.693,ns_1@10.242.238.88:<0.8274.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[442]}, {checkpoints,[{442,1}]}, {name,<<"rebalance_442">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[442]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"442"}]} [rebalance:debug,2014-08-19T16:50:33.694,ns_1@10.242.238.88:<0.8274.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8314.1> [rebalance:info,2014-08-19T16:50:33.695,ns_1@10.242.238.88:<0.8274.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:33.697,ns_1@10.242.238.88:<0.8274.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:33.697,ns_1@10.242.238.88:<0.8274.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:33.698,ns_1@10.242.238.88:<0.6021.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 442 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:33.700,ns_1@10.242.238.88:<0.6029.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:33.709,ns_1@10.242.238.88:<0.6029.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_442_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:33.710,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 442 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:33.710,ns_1@10.242.238.88:<0.8318.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 442 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:33.715,ns_1@10.242.238.88:<0.8295.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_447 [rebalance:info,2014-08-19T16:50:33.716,ns_1@10.242.238.88:<0.8295.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[447]}, {checkpoints,[{447,1}]}, {name,<<"rebalance_447">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[447]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"447"}]} 
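Each "Starting tap stream" entry prints two terms: the TAP stream parameters (vbucket list, starting checkpoint, stream name, takeover flag) and the ebucketmigrator connection options between the source and destination memcached endpoints on port 11209. The sketch below assembles and queries option lists of that shape with plain proplists; it is an illustration, not ebucketmigrator_srv itself, and the stream name derivation plus the hardcoded checkpoint 1 only mirror this particular run.

%% Sketch only: build/inspect rebalance TAP options like the ones logged above.
-module(tap_opts_sketch).
-export([rebalance_opts/2, is_takeover/1]).

rebalance_opts(Bucket, VBucket) ->
    Suffix = integer_to_list(VBucket),
    {[{vbuckets, [VBucket]},
      {checkpoints, [{VBucket, 1}]},          % checkpoint 1 as in this run
      {name, list_to_binary("rebalance_" ++ Suffix)},
      {takeover, true}],
     [{username, Bucket},
      {password, get_from_config},
      {vbuckets, [VBucket]},
      {set_to_pending_state, true},
      {takeover, true},
      {suffix, Suffix}]}.

is_takeover(Opts) ->
    proplists:get_bool(takeover, Opts).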
[rebalance:debug,2014-08-19T16:50:33.717,ns_1@10.242.238.88:<0.8295.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8319.1> [rebalance:info,2014-08-19T16:50:33.718,ns_1@10.242.238.88:<0.8295.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:33.720,ns_1@10.242.238.88:<0.8295.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:33.720,ns_1@10.242.238.88:<0.8295.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:33.721,ns_1@10.242.238.88:<0.5625.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 447 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:33.723,ns_1@10.242.238.88:<0.5633.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:33.723,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 433. Nacking mccouch update. [views:debug,2014-08-19T16:50:33.723,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/433. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.723,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",433,active,0} [ns_server:debug,2014-08-19T16:50:33.725,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,826,643,515,460,332,877,694,566,511,200, 1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280, 953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122, 978,850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692, 564,509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717, 589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614, 248,120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328, 873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898, 770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795, 740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637, 454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479, 168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376, 921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818, 763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660, 532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557, 502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 
706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628, 445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470, 342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912, 784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809, 754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523, 468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182, 910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935, 807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649, 521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491, 180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010, 805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855, 544,489,178,778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905, 594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644, 878,567,256,801,746,435,124,980,669,358,903,592,226] [ns_server:info,2014-08-19T16:50:33.726,ns_1@10.242.238.88:<0.5633.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_447_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:33.727,ns_1@10.242.238.88:<0.8296.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_446 [rebalance:info,2014-08-19T16:50:33.729,ns_1@10.242.238.88:<0.8296.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[446]}, {checkpoints,[{446,1}]}, {name,<<"rebalance_446">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[446]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"446"}]} [rebalance:debug,2014-08-19T16:50:33.730,ns_1@10.242.238.88:<0.8296.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8322.1> [rebalance:info,2014-08-19T16:50:33.734,ns_1@10.242.238.88:<0.6976.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 686 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.734,ns_1@10.242.238.88:<0.7197.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 683 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.734,ns_1@10.242.238.88:<0.8296.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:33.734,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 683 state to active [ns_server:debug,2014-08-19T16:50:33.735,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.735,ns_1@10.242.238.88:<0.7197.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 683 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:33.736,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
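The "Usable vbuckets" dumps in this section are emitted by capi_set_view_manager as vbuckets 437, 435 and now 433 become usable for the default bucket's views; the bucket itself is configured with {num_vbuckets,1024}. A small hypothetical helper for summarising one of those dumps:

%% Sketch only: summarise a "Usable vbuckets" list from the log.
-module(usable_vbuckets_sketch).
-export([summary/2]).

summary(UsableList, VBucket) ->
    Sorted = lists:usort(UsableList),
    [{usable, length(Sorted)},
     {not_yet_usable, 1024 - length(Sorted)},       % 1024 = num_vbuckets above
     {has_vbucket, lists:member(VBucket, Sorted)}].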
[ns_server:debug,2014-08-19T16:50:33.736,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:33.736,ns_1@10.242.238.88:<0.8296.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:50:33.736,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 686 state to active [ns_server:debug,2014-08-19T16:50:33.736,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.736,ns_1@10.242.238.88:<0.8296.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:33.737,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{442, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:33.737,ns_1@10.242.238.88:<0.5702.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 446 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:33.738,ns_1@10.242.238.88:<0.6976.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 686 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.738,ns_1@10.242.238.88:<0.7197.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.738,ns_1@10.242.238.88:<0.6976.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:debug,2014-08-19T16:50:33.739,ns_1@10.242.238.88:<0.5710.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:33.742,ns_1@10.242.238.88:<0.5710.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_446_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:33.746,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 442 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:33.746,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 442) [ns_server:debug,2014-08-19T16:50:33.747,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:33.747,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 447 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:33.747,ns_1@10.242.238.88:<0.8343.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 447 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:33.755,ns_1@10.242.238.88:<0.8298.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_445 [rebalance:info,2014-08-19T16:50:33.756,ns_1@10.242.238.88:<0.8298.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[445]}, {checkpoints,[{445,1}]}, {name,<<"rebalance_445">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[445]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"445"}]} [rebalance:debug,2014-08-19T16:50:33.757,ns_1@10.242.238.88:<0.8298.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8344.1> [views:debug,2014-08-19T16:50:33.757,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/433. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.757,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",433,active,0} [rebalance:info,2014-08-19T16:50:33.758,ns_1@10.242.238.88:<0.8298.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:33.759,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:33.760,ns_1@10.242.238.88:<0.8298.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:33.760,ns_1@10.242.238.88:<0.8298.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:33.760,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:33.760,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.761,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{447, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.761,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.761,ns_1@10.242.238.88:<0.5789.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 445 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:33.763,ns_1@10.242.238.88:<0.8299.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_444 [rebalance:debug,2014-08-19T16:50:33.763,ns_1@10.242.238.88:<0.5797.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:33.764,ns_1@10.242.238.88:<0.8299.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[444]}, {checkpoints,[{444,1}]}, {name,<<"rebalance_444">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[444]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"444"}]} [rebalance:debug,2014-08-19T16:50:33.765,ns_1@10.242.238.88:<0.8299.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8352.1> [rebalance:info,2014-08-19T16:50:33.768,ns_1@10.242.238.88:<0.8299.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:33.769,ns_1@10.242.238.88:<0.5797.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_445_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:50:33.769,ns_1@10.242.238.88:<0.8299.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:33.770,ns_1@10.242.238.88:<0.8299.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:33.770,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 447 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:33.771,ns_1@10.242.238.88:<0.5853.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 444 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:33.771,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 447) [ns_server:debug,2014-08-19T16:50:33.771,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:33.772,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 446 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:33.772,ns_1@10.242.238.88:<0.8357.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 446 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:33.772,ns_1@10.242.238.88:<0.5861.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:33.775,ns_1@10.242.238.88:<0.5861.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_444_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:33.776,ns_1@10.242.238.88:<0.8297.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_443 [rebalance:info,2014-08-19T16:50:33.777,ns_1@10.242.238.88:<0.8297.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[443]}, {checkpoints,[{443,1}]}, {name,<<"rebalance_443">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[443]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"443"}]} [rebalance:debug,2014-08-19T16:50:33.778,ns_1@10.242.238.88:<0.8297.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8360.1> [rebalance:info,2014-08-19T16:50:33.779,ns_1@10.242.238.88:<0.8297.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:33.780,ns_1@10.242.238.88:<0.8297.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:33.780,ns_1@10.242.238.88:<0.8297.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:33.781,ns_1@10.242.238.88:<0.5938.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 443 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:33.783,ns_1@10.242.238.88:<0.5952.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:33.786,ns_1@10.242.238.88:<0.5952.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_443_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:33.787,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.788,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.788,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{446, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, 
{fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.789,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:33.790,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.798,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 446 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:33.799,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 446) [ns_server:debug,2014-08-19T16:50:33.799,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:33.800,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 445 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:33.800,ns_1@10.242.238.88:<0.8381.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 445 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:33.817,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.818,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.818,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{445, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.818,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:33.818,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.824,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 445 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:33.824,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 445) [ns_server:debug,2014-08-19T16:50:33.825,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:33.825,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 444 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:33.825,ns_1@10.242.238.88:<0.8398.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 444 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:33.832,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 431. Nacking mccouch update. [views:debug,2014-08-19T16:50:33.832,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/431. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.832,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",431,active,0} [ns_server:debug,2014-08-19T16:50:33.834,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,826,643,515,460,332,877,694,566,511,200, 1005,928,800,745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280, 953,825,642,514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122, 978,850,667,539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692, 564,509,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717, 589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614, 431,248,120,976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456, 328,873,690,562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170, 898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923, 795,740,612,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765, 637,454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534, 479,168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504, 376,921,793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023,946, 818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843, 660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685, 557,502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994, 866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 
628,445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184, 912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937, 809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651, 523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493, 182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012, 935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832, 649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546, 491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621, 310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437,126,982,671, 360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410, 955,644,878,567,256,801,746,435,124,980,669,358,903,592,226] [ns_server:debug,2014-08-19T16:50:33.839,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.840,ns_1@10.242.238.88:<0.6822.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 688 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.840,ns_1@10.242.238.88:<0.7057.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 685 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:50:33.840,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:33.840,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.840,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{444, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.841,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:33.841,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 688 state to active [rebalance:info,2014-08-19T16:50:33.842,ns_1@10.242.238.88:<0.6822.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 688 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.842,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 685 state to active [rebalance:info,2014-08-19T16:50:33.843,ns_1@10.242.238.88:<0.7057.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 685 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.844,ns_1@10.242.238.88:<0.6822.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[rebalance:info,2014-08-19T16:50:33.844,ns_1@10.242.238.88:<0.7057.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.847,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 444 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:33.847,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 444) [ns_server:debug,2014-08-19T16:50:33.848,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:33.848,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 443 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:33.849,ns_1@10.242.238.88:<0.8417.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 443 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [views:debug,2014-08-19T16:50:33.866,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/431. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.866,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",431,active,0} [ns_server:debug,2014-08-19T16:50:33.868,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{443, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:33.868,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.870,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:33.871,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:33.871,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:33.884,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 443 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:33.885,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 443) [ns_server:debug,2014-08-19T16:50:33.886,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:33.940,ns_1@10.242.238.88:<0.6899.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 687 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:33.940,ns_1@10.242.238.88:<0.6654.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 690 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:33.941,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 687 state to active [rebalance:info,2014-08-19T16:50:33.942,ns_1@10.242.238.88:<0.6899.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 687 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:33.942,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 690 state to active [rebalance:info,2014-08-19T16:50:33.943,ns_1@10.242.238.88:<0.6654.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 690 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:33.943,ns_1@10.242.238.88:<0.6899.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:33.944,ns_1@10.242.238.88:<0.6654.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:33.966,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 429. Nacking mccouch update. [views:debug,2014-08-19T16:50:33.966,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/429. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:33.966,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",429,active,0} [ns_server:debug,2014-08-19T16:50:33.968,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,694,566,511,200,1005,928,800, 745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642, 514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198, 1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278, 951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120, 976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763, 635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532, 477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502, 374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889,706, 578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731,603, 420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628,445, 134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470,342, 887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912,784, 729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809,754, 626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523,468, 340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182,910, 782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935,807, 752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649,521, 466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491,180, 908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855,544, 489,178,778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594, 228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644,878, 567,256,801,746,435,124,980,669,358,903,592,226,826,515,460] 
[views:debug,2014-08-19T16:50:34.000,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/429. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.000,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",429,active,0} [rebalance:info,2014-08-19T16:50:34.033,ns_1@10.242.238.88:<0.6745.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 689 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:34.033,ns_1@10.242.238.88:<0.6494.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 692 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.033,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 689 state to active [rebalance:info,2014-08-19T16:50:34.034,ns_1@10.242.238.88:<0.6745.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 689 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:34.034,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 692 state to active [rebalance:info,2014-08-19T16:50:34.035,ns_1@10.242.238.88:<0.6494.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 692 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.035,ns_1@10.242.238.88:<0.6745.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:34.036,ns_1@10.242.238.88:<0.6494.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.092,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 427. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.092,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/427. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.092,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",427,active,0} [ns_server:debug,2014-08-19T16:50:34.094,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,694,566,511,200,1005,928,800, 745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642, 514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198, 1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278, 951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120, 976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818, 763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660, 532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557, 502,374,919,791,736,608,242,114,970,842,659,531,476,348,893,710,582,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344,889, 706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786,731, 603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756,628, 445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525,470, 342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184,912, 784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937,809, 754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651,523, 468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493,182, 910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012,935, 807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832,649, 521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546,491, 180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010, 805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621,310,855, 544,489,178,778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905, 594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410,955,644, 878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460] 
[rebalance:info,2014-08-19T16:50:34.133,ns_1@10.242.238.88:<0.6576.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 691 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:34.133,ns_1@10.242.238.88:<0.6308.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 694 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.133,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 691 state to active [rebalance:info,2014-08-19T16:50:34.134,ns_1@10.242.238.88:<0.6576.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 691 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:34.135,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 694 state to active [rebalance:info,2014-08-19T16:50:34.136,ns_1@10.242.238.88:<0.6308.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 694 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.136,ns_1@10.242.238.88:<0.6576.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:34.136,ns_1@10.242.238.88:<0.6308.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:34.142,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/427. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.142,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",427,active,0} [rebalance:info,2014-08-19T16:50:34.217,ns_1@10.242.238.88:<0.8493.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 696) [rebalance:info,2014-08-19T16:50:34.217,ns_1@10.242.238.88:<0.6385.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 693 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.217,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 693 state to active [rebalance:info,2014-08-19T16:50:34.218,ns_1@10.242.238.88:<0.6154.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:34.219,ns_1@10.242.238.88:<0.6385.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 693 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.219,ns_1@10.242.238.88:<0.6385.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:34.221,ns_1@10.242.238.88:<0.6162.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_696_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.221,ns_1@10.242.238.88:<0.6154.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:34.224,ns_1@10.242.238.88:<0.6154.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 696 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8500.1> [ns_server:info,2014-08-19T16:50:34.224,ns_1@10.242.238.88:<0.8500.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 696 to state replica [ns_server:debug,2014-08-19T16:50:34.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 425. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.245,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/425. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",425,active,0} [ns_server:debug,2014-08-19T16:50:34.247,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,694,566,511,200,1005,928,800, 745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642, 514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198, 1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278, 951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120, 976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818, 763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660, 532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557, 502,374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994, 
866,683,555,500,372,917,789,734,606,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653,525, 470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495,184, 912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014,937, 809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834,651, 523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548,493, 182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262,1012, 935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960,832, 649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674,546, 491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932,621, 310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437,126,982,671, 360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721,410, 955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460] [ns_server:debug,2014-08-19T16:50:34.259,ns_1@10.242.238.88:<0.8500.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_696 [rebalance:info,2014-08-19T16:50:34.260,ns_1@10.242.238.88:<0.8500.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[696]}, {checkpoints,[{696,1}]}, {name,<<"rebalance_696">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[696]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"696"}]} [rebalance:debug,2014-08-19T16:50:34.261,ns_1@10.242.238.88:<0.8500.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8501.1> [rebalance:info,2014-08-19T16:50:34.262,ns_1@10.242.238.88:<0.8500.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:34.263,ns_1@10.242.238.88:<0.8500.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.264,ns_1@10.242.238.88:<0.8500.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:34.264,ns_1@10.242.238.88:<0.6154.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 696 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:34.266,ns_1@10.242.238.88:<0.6162.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:34.267,ns_1@10.242.238.88:<0.8502.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 698) [rebalance:info,2014-08-19T16:50:34.267,ns_1@10.242.238.88:<0.6231.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 695 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.268,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 695 state to active [rebalance:info,2014-08-19T16:50:34.268,ns_1@10.242.238.88:<0.5986.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:34.269,ns_1@10.242.238.88:<0.6231.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 695 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.269,ns_1@10.242.238.88:<0.6231.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:34.269,ns_1@10.242.238.88:<0.6162.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_696_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:34.270,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 696 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.270,ns_1@10.242.238.88:<0.8510.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 696 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:34.271,ns_1@10.242.238.88:<0.5994.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_698_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.271,ns_1@10.242.238.88:<0.5986.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:34.274,ns_1@10.242.238.88:<0.5986.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 698 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8513.1> [ns_server:info,2014-08-19T16:50:34.274,ns_1@10.242.238.88:<0.8513.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 698 to state replica [views:debug,2014-08-19T16:50:34.276,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/425. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.276,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",425,active,0} [ns_server:debug,2014-08-19T16:50:34.289,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.291,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{696, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.291,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:34.291,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.291,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.297,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 696 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:34.298,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 696) [ns_server:debug,2014-08-19T16:50:34.298,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:34.304,ns_1@10.242.238.88:<0.8513.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_698 [rebalance:info,2014-08-19T16:50:34.306,ns_1@10.242.238.88:<0.8513.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[698]}, {checkpoints,[{698,1}]}, {name,<<"rebalance_698">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[698]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"698"}]} [rebalance:debug,2014-08-19T16:50:34.306,ns_1@10.242.238.88:<0.8513.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8523.1> [rebalance:info,2014-08-19T16:50:34.307,ns_1@10.242.238.88:<0.8513.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:34.309,ns_1@10.242.238.88:<0.8513.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.309,ns_1@10.242.238.88:<0.8513.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:34.310,ns_1@10.242.238.88:<0.5986.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 698 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:34.312,ns_1@10.242.238.88:<0.5994.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:34.315,ns_1@10.242.238.88:<0.5994.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_698_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:34.316,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 698 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.316,ns_1@10.242.238.88:<0.8535.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 698 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:50:34.329,ns_1@10.242.238.88:<0.8542.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 702) [rebalance:info,2014-08-19T16:50:34.330,ns_1@10.242.238.88:<0.8543.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 699) [rebalance:info,2014-08-19T16:50:34.330,ns_1@10.242.238.88:<0.8544.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 701) [rebalance:info,2014-08-19T16:50:34.330,ns_1@10.242.238.88:<0.8545.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for 
ns_1@10.242.238.90 (vbucket 700) [rebalance:info,2014-08-19T16:50:34.330,ns_1@10.242.238.88:<0.7253.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 938 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:34.330,ns_1@10.242.238.88:<0.8546.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 697) [ns_server:info,2014-08-19T16:50:34.330,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 938 state to active [rebalance:info,2014-08-19T16:50:34.331,ns_1@10.242.238.88:<0.5667.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:34.331,ns_1@10.242.238.88:<0.5909.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:34.331,ns_1@10.242.238.88:<0.5754.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:34.331,ns_1@10.242.238.88:<0.5832.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:34.331,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.331,ns_1@10.242.238.88:<0.7253.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 938 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.332,ns_1@10.242.238.88:<0.6063.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:50:34.332,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:50:34.332,ns_1@10.242.238.88:<0.7253.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.332,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.332,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{698, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.333,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:34.335,ns_1@10.242.238.88:<0.5675.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_702_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.335,ns_1@10.242.238.88:<0.5667.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:34.336,ns_1@10.242.238.88:<0.5917.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_699_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.336,ns_1@10.242.238.88:<0.5909.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:34.336,ns_1@10.242.238.88:<0.5762.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_701_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.336,ns_1@10.242.238.88:<0.5754.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:34.337,ns_1@10.242.238.88:<0.5840.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_700_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.337,ns_1@10.242.238.88:<0.5832.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:34.337,ns_1@10.242.238.88:<0.6071.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_697_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.337,ns_1@10.242.238.88:<0.6063.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:34.338,ns_1@10.242.238.88:<0.5667.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 702 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8569.1> [ns_server:info,2014-08-19T16:50:34.341,ns_1@10.242.238.88:<0.8569.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 702 to state replica [rebalance:info,2014-08-19T16:50:34.342,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 698 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:34.343,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 698) [ns_server:debug,2014-08-19T16:50:34.343,ns_1@10.242.238.88:<0.5909.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 699 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8570.1> [ns_server:debug,2014-08-19T16:50:34.343,ns_1@10.242.238.88:<0.5832.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 700 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8571.1> [ns_server:debug,2014-08-19T16:50:34.344,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:info,2014-08-19T16:50:34.344,ns_1@10.242.238.88:<0.8570.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 699 to state replica [ns_server:info,2014-08-19T16:50:34.344,ns_1@10.242.238.88:<0.8571.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 700 to state replica [ns_server:debug,2014-08-19T16:50:34.344,ns_1@10.242.238.88:<0.6063.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 697 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8573.1> [ns_server:debug,2014-08-19T16:50:34.345,ns_1@10.242.238.88:<0.5754.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 701 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8574.1> [ns_server:info,2014-08-19T16:50:34.345,ns_1@10.242.238.88:<0.8573.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 697 to state replica [ns_server:info,2014-08-19T16:50:34.346,ns_1@10.242.238.88:<0.8574.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 701 to state replica [ns_server:debug,2014-08-19T16:50:34.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 423. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.352,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/423. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",423,active,0} [ns_server:debug,2014-08-19T16:50:34.354,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,694,566,511,200,1005,928,800, 745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642, 514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198, 1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278, 951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120, 976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818, 763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660, 532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557, 502,374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994, 866,683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834, 651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548, 493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262, 1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960, 832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674, 546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932, 621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437,126,982, 671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721, 410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460] 
[ns_server:debug,2014-08-19T16:50:34.372,ns_1@10.242.238.88:<0.8569.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_702 [rebalance:info,2014-08-19T16:50:34.375,ns_1@10.242.238.88:<0.8569.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[702]}, {checkpoints,[{702,1}]}, {name,<<"rebalance_702">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[702]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"702"}]} [rebalance:debug,2014-08-19T16:50:34.376,ns_1@10.242.238.88:<0.8569.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8575.1> [rebalance:info,2014-08-19T16:50:34.377,ns_1@10.242.238.88:<0.8569.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:34.378,ns_1@10.242.238.88:<0.8569.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.378,ns_1@10.242.238.88:<0.8569.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:34.379,ns_1@10.242.238.88:<0.5667.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 702 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:34.381,ns_1@10.242.238.88:<0.5675.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:34.384,ns_1@10.242.238.88:<0.5675.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_702_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:34.384,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 702 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.385,ns_1@10.242.238.88:<0.8579.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 702 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:34.387,ns_1@10.242.238.88:<0.8571.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_700 [rebalance:info,2014-08-19T16:50:34.389,ns_1@10.242.238.88:<0.8571.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[700]}, {checkpoints,[{700,1}]}, {name,<<"rebalance_700">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[700]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"700"}]} [rebalance:debug,2014-08-19T16:50:34.390,ns_1@10.242.238.88:<0.8571.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8580.1> [rebalance:info,2014-08-19T16:50:34.390,ns_1@10.242.238.88:<0.8571.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:34.393,ns_1@10.242.238.88:<0.8571.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.393,ns_1@10.242.238.88:<0.8571.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:34.394,ns_1@10.242.238.88:<0.5832.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 700 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:34.396,ns_1@10.242.238.88:<0.5840.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
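The entries above repeat the same per-vbucket takeover sequence for vbuckets 702 and 700 (and, further below, 701, 699 and 697): the old rebalance_<vb> tap is killed, a takeover tap stream is opened toward the future master, no backfill is needed, the migrator exits normally, the new master is flipped to active, the replica builders are shut down, and the surviving replica is re-pointed at the new master. A minimal log-scanning sketch, assuming a saved copy of this log and using only message texts visible here (the helper itself is hypothetical and not part of ns_server), can recover that timeline for one vbucket:

import re

# Phase markers copied from the messages above; best-effort and illustrative
# only -- this is not an ns_server module.
PHASE_PATTERNS = [
    ("kill_old_tap",  "killing tap named: rebalance_{vb}"),
    ("takeover_tap",  r"Starting tap stream: \[\{{vbuckets,\[{vb}\]\}}"),
    ("new_master_up", r"Doing vbucket {vb} state change: \{{'ns_1@[0-9.]+',active"),
    ("move_done",     "Moving vbucket {vb} done"),
]

TIMESTAMP = re.compile(r"\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+")

def takeover_timeline(log_text, vb):
    """Best-effort (timestamp, phase) pairs for one vbucket's move."""
    patterns = [(name, re.compile(pat.format(vb=vb)))
                for name, pat in PHASE_PATTERNS]
    timeline = []
    for line in log_text.splitlines():
        ts = TIMESTAMP.search(line)
        for name, rx in patterns:
            if rx.search(line):
                timeline.append((ts.group(0) if ts else None, name))
    return timeline

# e.g. takeover_timeline(open("ns_server.debug.log").read(), 702)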
[ns_server:info,2014-08-19T16:50:34.399,ns_1@10.242.238.88:<0.5840.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_700_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:34.399,ns_1@10.242.238.88:<0.8583.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 703) [rebalance:info,2014-08-19T16:50:34.399,ns_1@10.242.238.88:<0.7099.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 940 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.399,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 940 state to active [rebalance:info,2014-08-19T16:50:34.400,ns_1@10.242.238.88:<0.5564.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:34.401,ns_1@10.242.238.88:<0.7099.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 940 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:34.401,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.401,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:34.401,ns_1@10.242.238.88:<0.7099.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.401,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{702, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.402,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:34.402,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/423. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.402,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.402,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",423,active,0} [ns_server:info,2014-08-19T16:50:34.403,ns_1@10.242.238.88:<0.5572.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_703_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:34.403,ns_1@10.242.238.88:<0.5564.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:34.407,ns_1@10.242.238.88:<0.8574.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_701 [ns_server:debug,2014-08-19T16:50:34.407,ns_1@10.242.238.88:<0.5564.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 703 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.8598.1> [ns_server:info,2014-08-19T16:50:34.408,ns_1@10.242.238.88:<0.8598.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 703 to state replica [rebalance:info,2014-08-19T16:50:34.409,ns_1@10.242.238.88:<0.8574.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[701]}, {checkpoints,[{701,1}]}, {name,<<"rebalance_701">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[701]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"701"}]} [rebalance:debug,2014-08-19T16:50:34.409,ns_1@10.242.238.88:<0.8574.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8599.1> [rebalance:info,2014-08-19T16:50:34.410,ns_1@10.242.238.88:<0.8574.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:34.410,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 702 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:34.411,ns_1@10.242.238.88:<0.8574.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.412,ns_1@10.242.238.88:<0.8574.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:34.412,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 702) [ns_server:debug,2014-08-19T16:50:34.412,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:34.412,ns_1@10.242.238.88:<0.5754.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 701 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:34.412,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 700 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.413,ns_1@10.242.238.88:<0.8602.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 700 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:34.414,ns_1@10.242.238.88:<0.5762.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:34.418,ns_1@10.242.238.88:<0.5762.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_701_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:34.421,ns_1@10.242.238.88:<0.8570.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_699 [rebalance:info,2014-08-19T16:50:34.422,ns_1@10.242.238.88:<0.8570.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[699]}, {checkpoints,[{699,1}]}, {name,<<"rebalance_699">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[699]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"699"}]} [rebalance:debug,2014-08-19T16:50:34.423,ns_1@10.242.238.88:<0.8570.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8605.1> [rebalance:info,2014-08-19T16:50:34.424,ns_1@10.242.238.88:<0.8570.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:34.426,ns_1@10.242.238.88:<0.8570.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.426,ns_1@10.242.238.88:<0.8570.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:34.427,ns_1@10.242.238.88:<0.5909.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 699 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:34.429,ns_1@10.242.238.88:<0.5917.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:34.433,ns_1@10.242.238.88:<0.8573.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_697 [rebalance:info,2014-08-19T16:50:34.435,ns_1@10.242.238.88:<0.8573.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[697]}, {checkpoints,[{697,1}]}, {name,<<"rebalance_697">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[697]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"697"}]} 
[rebalance:debug,2014-08-19T16:50:34.436,ns_1@10.242.238.88:<0.8573.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8606.1> [rebalance:info,2014-08-19T16:50:34.438,ns_1@10.242.238.88:<0.8573.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:34.440,ns_1@10.242.238.88:<0.8573.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.440,ns_1@10.242.238.88:<0.8573.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:34.441,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.441,ns_1@10.242.238.88:<0.6063.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 697 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:34.442,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{700, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.442,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:34.443,ns_1@10.242.238.88:<0.6071.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:34.443,ns_1@10.242.238.88:<0.5917.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_699_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:34.444,ns_1@10.242.238.88:<0.6955.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 942 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:50:34.444,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.444,ns_1@10.242.238.88:<0.7176.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 939 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.445,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 942 state to active [ns_server:debug,2014-08-19T16:50:34.446,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
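Each completed move is followed by a "config change: buckets ->" entry whose map field lists only the changed vbucket. Judging from the surrounding moves, each tuple reads as {VBucketId, OldChain, NewChain}; that reading is an interpretation of the sanitized output above, not a documented format. A tiny sketch of it, with the values copied from the vbucket 700 entry just logged (undefined rendered as None):

# Assumed reading of the sanitized map diff: {VBucketId, OldChain, NewChain}.
entry = (700,
         ["ns_1@10.242.238.88", None],                    # chain before the move
         ["ns_1@10.242.238.90", "ns_1@10.242.238.91"])    # chain after the move

vb, old_chain, new_chain = entry
print("vb %d: master %s -> %s, replica %s -> %s"
      % (vb, old_chain[0], new_chain[0], old_chain[1], new_chain[1]))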
[rebalance:info,2014-08-19T16:50:34.447,ns_1@10.242.238.88:<0.6955.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 942 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:34.447,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 939 state to active [ns_server:info,2014-08-19T16:50:34.447,ns_1@10.242.238.88:<0.6071.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_697_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:34.449,ns_1@10.242.238.88:<0.7176.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 939 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.449,ns_1@10.242.238.88:<0.6955.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:34.449,ns_1@10.242.238.88:<0.7176.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.453,ns_1@10.242.238.88:<0.8598.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_703 [rebalance:info,2014-08-19T16:50:34.452,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 700 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:34.457,ns_1@10.242.238.88:<0.8598.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[703]}, {checkpoints,[{703,1}]}, {name,<<"rebalance_703">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[703]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"703"}]} [ns_server:debug,2014-08-19T16:50:34.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 700) [rebalance:debug,2014-08-19T16:50:34.458,ns_1@10.242.238.88:<0.8598.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8628.1> [ns_server:debug,2014-08-19T16:50:34.458,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:34.458,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 701 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.458,ns_1@10.242.238.88:<0.8633.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 701 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:50:34.459,ns_1@10.242.238.88:<0.8598.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:34.462,ns_1@10.242.238.88:<0.8598.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:34.462,ns_1@10.242.238.88:<0.8598.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:34.463,ns_1@10.242.238.88:<0.5564.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 703 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:34.464,ns_1@10.242.238.88:<0.5572.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:34.468,ns_1@10.242.238.88:<0.5572.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the 
following tap names on 'ns_1@10.242.238.88': [<<"replication_building_703_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:34.474,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.475,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{701, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.475,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.475,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:34.475,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.482,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 701 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:34.482,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 701) [ns_server:debug,2014-08-19T16:50:34.483,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:34.483,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 699 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.483,ns_1@10.242.238.88:<0.8657.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 699 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:34.498,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.499,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:34.499,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.499,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{699, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.499,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.511,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 699 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:34.511,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 699) [ns_server:debug,2014-08-19T16:50:34.512,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:34.512,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 697 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.512,ns_1@10.242.238.88:<0.8668.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 697 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:34.528,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.529,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:34.529,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{697, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.529,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.530,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.537,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 697 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:34.537,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 697) [ns_server:debug,2014-08-19T16:50:34.538,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:34.538,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 703 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:34.539,ns_1@10.242.238.88:<0.8678.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 703 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:34.541,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 421. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.541,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/421. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.542,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",421,active,0} [ns_server:debug,2014-08-19T16:50:34.543,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,694,566,511,200,1005,928,800, 745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642, 514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198, 1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278, 951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120, 976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818, 763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660, 532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557, 502,374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994, 866,683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 
811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,236,108,964,836, 653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962, 834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390, 262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232, 960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857, 674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009, 932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437,126, 982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776, 721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515, 460] [ns_server:debug,2014-08-19T16:50:34.554,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.555,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:34.556,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:34.556,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{703, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:34.556,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:34.565,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 703 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:34.566,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 703) [ns_server:debug,2014-08-19T16:50:34.566,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:50:34.575,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/421. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.575,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",421,active,0} [rebalance:info,2014-08-19T16:50:34.578,ns_1@10.242.238.88:<0.7022.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 941 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:34.578,ns_1@10.242.238.88:<0.6787.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 944 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.578,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 941 state to active [rebalance:info,2014-08-19T16:50:34.579,ns_1@10.242.238.88:<0.7022.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 941 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:34.579,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 944 state to active [rebalance:info,2014-08-19T16:50:34.580,ns_1@10.242.238.88:<0.6787.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 944 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.581,ns_1@10.242.238.88:<0.7022.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:34.581,ns_1@10.242.238.88:<0.6787.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.650,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 419. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.650,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/419. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.650,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",419,active,0} [ns_server:debug,2014-08-19T16:50:34.652,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,566,511,200,928,800,745,617, 434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926, 798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823, 640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848, 665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507, 196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404, 276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246, 118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768, 713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738, 610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,234,962,834, 651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676,548, 493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390,262, 1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232,960, 832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857,674, 546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009,932, 621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437,126,982, 671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776,721, 410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460, 694,1005] 
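The "Usable vbuckets:" snapshots that capi_set_view_manager prints after each set_vbucket event differ only slightly from one snapshot to the next; here each snapshot printed after a default/<N> activation appears to gain that vbucket id (421, then 419, and so on below). A small diff helper, assuming two snapshot lists copied out of this log (illustrative only, not a Couchbase tool), makes those deltas visible:

import re

def parse_usable(snapshot_text):
    """Turn one pasted '[933,622,...]' list into a set of vbucket ids."""
    return set(int(n) for n in re.findall(r"\d+", snapshot_text))

def diff_usable(before_text, after_text):
    """Return (newly usable ids, no longer usable ids) between two snapshots."""
    before, after = parse_usable(before_text), parse_usable(after_text)
    return sorted(after - before), sorted(before - after)

# e.g. comparing the snapshots printed before and after the default/419 event
# above yields ([419], []).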
[views:debug,2014-08-19T16:50:34.684,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/419. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.684,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",419,active,0} [rebalance:info,2014-08-19T16:50:34.704,ns_1@10.242.238.88:<0.6632.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 946 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:34.704,ns_1@10.242.238.88:<0.6878.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 943 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.704,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 946 state to active [rebalance:info,2014-08-19T16:50:34.706,ns_1@10.242.238.88:<0.6632.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 946 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:34.706,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 943 state to active [rebalance:info,2014-08-19T16:50:34.707,ns_1@10.242.238.88:<0.6878.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 943 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.707,ns_1@10.242.238.88:<0.6632.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:34.707,ns_1@10.242.238.88:<0.6878.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.759,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 417. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.759,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/417. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.760,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",417,active,0} [ns_server:debug,2014-08-19T16:50:34.761,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,566,511,200,928,800,745,617, 434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926, 798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823, 640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848, 665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507, 196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404, 276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246, 118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768, 713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738, 610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234,962, 834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390, 262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,232, 960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985,857, 674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,1009, 932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437,126, 982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176,776, 721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515, 460,694,1005] 
[views:debug,2014-08-19T16:50:34.793,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/417. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.794,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",417,active,0} [rebalance:info,2014-08-19T16:50:34.837,ns_1@10.242.238.88:<0.6473.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 948 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:34.837,ns_1@10.242.238.88:<0.6710.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 945 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.838,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 948 state to active [rebalance:info,2014-08-19T16:50:34.839,ns_1@10.242.238.88:<0.6473.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 948 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:34.839,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 945 state to active [rebalance:info,2014-08-19T16:50:34.840,ns_1@10.242.238.88:<0.6710.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 945 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.840,ns_1@10.242.238.88:<0.6473.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:34.841,ns_1@10.242.238.88:<0.6710.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.869,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 415. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.869,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/415. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.869,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",415,active,0} [ns_server:debug,2014-08-19T16:50:34.871,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,958,647,336,881,570,204,804,749,438,983,672,906, 595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956,645, 334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384,1006, 929,618,252,852,541,486,775,720,954,643,332,877,566,511,200,928,800,745,617, 434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926, 798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823, 640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848, 665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507, 196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404, 276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246, 118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768, 713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738, 610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234,962, 834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390, 262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,415, 232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985, 857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699, 571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176, 776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826, 515,460,694,1005] 
[views:debug,2014-08-19T16:50:34.903,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/415. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.903,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",415,active,0} [rebalance:info,2014-08-19T16:50:34.971,ns_1@10.242.238.88:<0.6555.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 947 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:34.971,ns_1@10.242.238.88:<0.6287.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 950 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:34.972,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 947 state to active [rebalance:info,2014-08-19T16:50:34.973,ns_1@10.242.238.88:<0.6555.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 947 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:34.973,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 950 state to active [rebalance:info,2014-08-19T16:50:34.974,ns_1@10.242.238.88:<0.6287.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 950 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:34.974,ns_1@10.242.238.88:<0.6555.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:34.975,ns_1@10.242.238.88:<0.6287.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:34.978,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 413. Nacking mccouch update. [views:debug,2014-08-19T16:50:34.978,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/413. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:34.978,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",413,active,0} [ns_server:debug,2014-08-19T16:50:34.979,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,956, 645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695,384, 1006,929,618,252,852,541,486,775,720,954,643,332,877,566,511,200,928,800,745, 617,434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514, 459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539, 484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951, 823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562, 507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587, 404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429, 246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896, 768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793, 738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763, 635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532, 477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502, 374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836, 653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859, 676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573, 390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598, 415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312, 985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882, 699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176, 776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826, 515,460,694,1005] 
[views:debug,2014-08-19T16:50:35.012,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/413. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.012,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",413,active,0} [rebalance:info,2014-08-19T16:50:35.098,ns_1@10.242.238.88:<0.6364.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 949 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:35.098,ns_1@10.242.238.88:<0.8791.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 952) [ns_server:info,2014-08-19T16:50:35.099,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 949 state to active [rebalance:info,2014-08-19T16:50:35.099,ns_1@10.242.238.88:<0.6127.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.100,ns_1@10.242.238.88:<0.6364.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 949 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:35.100,ns_1@10.242.238.88:<0.6364.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:35.103,ns_1@10.242.238.88:<0.6140.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_952_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.103,ns_1@10.242.238.88:<0.6127.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:35.105,ns_1@10.242.238.88:<0.6127.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 952 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8798.1> [ns_server:info,2014-08-19T16:50:35.106,ns_1@10.242.238.88:<0.8798.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 952 to state replica [ns_server:debug,2014-08-19T16:50:35.135,ns_1@10.242.238.88:<0.8798.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_952 [rebalance:info,2014-08-19T16:50:35.137,ns_1@10.242.238.88:<0.8798.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[952]}, {checkpoints,[{952,1}]}, {name,<<"rebalance_952">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[952]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"952"}]} [rebalance:debug,2014-08-19T16:50:35.137,ns_1@10.242.238.88:<0.8798.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8799.1> [rebalance:info,2014-08-19T16:50:35.139,ns_1@10.242.238.88:<0.8798.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.141,ns_1@10.242.238.88:<0.8798.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.141,ns_1@10.242.238.88:<0.8798.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.142,ns_1@10.242.238.88:<0.6127.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 952 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
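For the vbuckets headed to 'ns_1@10.242.238.91' (952 here, 954 just below), the mover first waits for the index update, shuts down the replicator into the destination, and only then does the takeover, as the <0.6127.1> entries above spell out. The "Spawned mover" lines carry the bucket, vbucket id, source and destination, so a short sketch (a hypothetical helper, not an ns_server function) can tabulate who is moving where:

import re

# Collect the mover spawns recorded in this log as
# (vbucket, bucket, source node, destination node).
MOVER = re.compile(
    r'Spawned mover "(?P<bucket>[^"]+)" (?P<vb>\d+) '
    r"'(?P<src>[^']+)' -> '(?P<dst>[^']+)'")

def movers(log_text):
    return [(int(m["vb"]), m["bucket"], m["src"], m["dst"])
            for m in MOVER.finditer(log_text)]

# From this excerpt: (952, "default", "ns_1@10.242.238.88", "ns_1@10.242.238.91"),
# (954, "default", ...), and so on.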
[rebalance:debug,2014-08-19T16:50:35.143,ns_1@10.242.238.88:<0.6140.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.146,ns_1@10.242.238.88:<0.6140.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_952_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.146,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 952 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:35.147,ns_1@10.242.238.88:<0.8803.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 952 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:35.148,ns_1@10.242.238.88:<0.6210.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 951 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:35.148,ns_1@10.242.238.88:<0.8804.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 954) [ns_server:info,2014-08-19T16:50:35.149,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 951 state to active [rebalance:info,2014-08-19T16:50:35.149,ns_1@10.242.238.88:<0.5965.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.150,ns_1@10.242.238.88:<0.6210.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 951 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:35.150,ns_1@10.242.238.88:<0.6210.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:35.152,ns_1@10.242.238.88:<0.5973.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_954_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.152,ns_1@10.242.238.88:<0.5965.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:35.154,ns_1@10.242.238.88:<0.5965.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 954 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8811.1> [ns_server:info,2014-08-19T16:50:35.155,ns_1@10.242.238.88:<0.8811.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 954 to state replica [ns_server:debug,2014-08-19T16:50:35.169,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 411. Nacking mccouch update. [views:debug,2014-08-19T16:50:35.170,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/411. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",411,active,0} [ns_server:debug,2014-08-19T16:50:35.172,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,954,643,332,877,566,511,200,928,800, 745,617,434,306,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642, 514,459,148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667, 539,484,356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198, 1003,926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278, 951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120, 976,848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818, 763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660, 532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557, 502,374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994, 866,683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678, 550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392, 264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417, 234,962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440, 312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154, 882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464, 698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748, 437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487, 176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226, 826,515,460,694,1005] 
[ns_server:debug,2014-08-19T16:50:35.175,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.175,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{952, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.175,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.176,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.176,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.184,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 952 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.185,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 952) [ns_server:debug,2014-08-19T16:50:35.186,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:35.186,ns_1@10.242.238.88:<0.8811.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_954 [rebalance:info,2014-08-19T16:50:35.188,ns_1@10.242.238.88:<0.8811.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[954]}, {checkpoints,[{954,1}]}, {name,<<"rebalance_954">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[954]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"954"}]} [rebalance:debug,2014-08-19T16:50:35.188,ns_1@10.242.238.88:<0.8811.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8826.1> [rebalance:info,2014-08-19T16:50:35.189,ns_1@10.242.238.88:<0.8811.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.191,ns_1@10.242.238.88:<0.8811.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.191,ns_1@10.242.238.88:<0.8811.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.192,ns_1@10.242.238.88:<0.5965.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 954 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.194,ns_1@10.242.238.88:<0.5973.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.196,ns_1@10.242.238.88:<0.5973.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_954_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.197,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 954 state 
change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:35.197,ns_1@10.242.238.88:<0.8830.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 954 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:35.199,ns_1@10.242.238.88:<0.8832.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 953) [rebalance:info,2014-08-19T16:50:35.199,ns_1@10.242.238.88:<0.8831.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 956) [rebalance:info,2014-08-19T16:50:35.199,ns_1@10.242.238.88:<0.6042.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.200,ns_1@10.242.238.88:<0.5810.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:35.203,ns_1@10.242.238.88:<0.6050.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_953_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.203,ns_1@10.242.238.88:<0.6042.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.203,ns_1@10.242.238.88:<0.5818.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_956_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.204,ns_1@10.242.238.88:<0.5810.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:35.205,ns_1@10.242.238.88:<0.6042.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 953 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8837.1> [ns_server:debug,2014-08-19T16:50:35.206,ns_1@10.242.238.88:<0.5810.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 956 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8838.1> [ns_server:info,2014-08-19T16:50:35.206,ns_1@10.242.238.88:<0.8837.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 953 to state replica [ns_server:info,2014-08-19T16:50:35.207,ns_1@10.242.238.88:<0.8838.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 956 to state replica [ns_server:debug,2014-08-19T16:50:35.214,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.215,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.216,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:35.216,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{954, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.216,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.223,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 954 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.223,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 954) [ns_server:debug,2014-08-19T16:50:35.224,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:35.235,ns_1@10.242.238.88:<0.8837.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_953 [rebalance:info,2014-08-19T16:50:35.237,ns_1@10.242.238.88:<0.8837.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[953]}, {checkpoints,[{953,1}]}, {name,<<"rebalance_953">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[953]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"953"}]} [rebalance:debug,2014-08-19T16:50:35.239,ns_1@10.242.238.88:<0.8837.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8848.1> [rebalance:info,2014-08-19T16:50:35.240,ns_1@10.242.238.88:<0.8837.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.241,ns_1@10.242.238.88:<0.8837.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.241,ns_1@10.242.238.88:<0.8837.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.242,ns_1@10.242.238.88:<0.6042.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 953 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.243,ns_1@10.242.238.88:<0.6050.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.247,ns_1@10.242.238.88:<0.6050.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_953_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.247,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 953 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:35.247,ns_1@10.242.238.88:<0.8852.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 953 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:35.253,ns_1@10.242.238.88:<0.8838.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_956 
[rebalance:info,2014-08-19T16:50:35.254,ns_1@10.242.238.88:<0.8838.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[956]}, {checkpoints,[{956,1}]}, {name,<<"rebalance_956">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[956]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"956"}]} [rebalance:debug,2014-08-19T16:50:35.255,ns_1@10.242.238.88:<0.8838.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8853.1> [views:debug,2014-08-19T16:50:35.256,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/411. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.256,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",411,active,0} [rebalance:info,2014-08-19T16:50:35.256,ns_1@10.242.238.88:<0.8838.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.258,ns_1@10.242.238.88:<0.8838.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.258,ns_1@10.242.238.88:<0.8838.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.259,ns_1@10.242.238.88:<0.5810.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 956 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.260,ns_1@10.242.238.88:<0.5818.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.264,ns_1@10.242.238.88:<0.5818.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_956_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:35.266,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.267,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{953, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.267,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.267,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.267,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.276,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 953 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.276,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 953) [ns_server:debug,2014-08-19T16:50:35.277,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.277,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 956 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:35.277,ns_1@10.242.238.88:<0.8866.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 956 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:35.295,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.296,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{956, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.296,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.296,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.297,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.307,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 956 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.307,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 956) [ns_server:debug,2014-08-19T16:50:35.308,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.376,ns_1@10.242.238.88:<0.8890.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 958) [rebalance:info,2014-08-19T16:50:35.376,ns_1@10.242.238.88:<0.8891.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 955) [rebalance:info,2014-08-19T16:50:35.377,ns_1@10.242.238.88:<0.5646.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.377,ns_1@10.242.238.88:<0.5888.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:50:35.380,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 409. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:35.380,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/409. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.380,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",409,active,0} [ns_server:info,2014-08-19T16:50:35.381,ns_1@10.242.238.88:<0.5654.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_958_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.381,ns_1@10.242.238.88:<0.5646.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.381,ns_1@10.242.238.88:<0.5896.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_955_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.382,ns_1@10.242.238.88:<0.5888.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:35.383,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,224,952,824,641,513,458,330,875,692,564,509,198,1003,926, 798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823, 640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848, 665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562,507, 196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404, 276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246, 118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454,326,999, 871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768, 713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738, 610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550,495, 
184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264,1014, 937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234,962, 834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859,676, 548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573,390, 262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598,415, 232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312,985, 857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882,699, 571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176, 776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826, 515,460,694,1005,928,617,306] [ns_server:debug,2014-08-19T16:50:35.384,ns_1@10.242.238.88:<0.5646.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 958 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8896.1> [ns_server:debug,2014-08-19T16:50:35.384,ns_1@10.242.238.88:<0.5888.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 955 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8897.1> [ns_server:info,2014-08-19T16:50:35.385,ns_1@10.242.238.88:<0.8896.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 958 to state replica [ns_server:info,2014-08-19T16:50:35.385,ns_1@10.242.238.88:<0.8897.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 955 to state replica [ns_server:debug,2014-08-19T16:50:35.417,ns_1@10.242.238.88:<0.8896.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_958 [rebalance:info,2014-08-19T16:50:35.418,ns_1@10.242.238.88:<0.8896.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[958]}, {checkpoints,[{958,1}]}, {name,<<"rebalance_958">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[958]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"958"}]} [rebalance:debug,2014-08-19T16:50:35.419,ns_1@10.242.238.88:<0.8896.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8898.1> [rebalance:info,2014-08-19T16:50:35.420,ns_1@10.242.238.88:<0.8896.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.421,ns_1@10.242.238.88:<0.8896.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.422,ns_1@10.242.238.88:<0.8896.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.423,ns_1@10.242.238.88:<0.5646.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 958 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.424,ns_1@10.242.238.88:<0.5654.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.427,ns_1@10.242.238.88:<0.5654.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_958_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.428,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 958 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] 
[rebalance:info,2014-08-19T16:50:35.428,ns_1@10.242.238.88:<0.8902.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 958 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:35.430,ns_1@10.242.238.88:<0.8897.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_955 [rebalance:info,2014-08-19T16:50:35.431,ns_1@10.242.238.88:<0.8897.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[955]}, {checkpoints,[{955,1}]}, {name,<<"rebalance_955">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[955]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"955"}]} [rebalance:debug,2014-08-19T16:50:35.432,ns_1@10.242.238.88:<0.8897.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8903.1> [rebalance:info,2014-08-19T16:50:35.433,ns_1@10.242.238.88:<0.8897.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.435,ns_1@10.242.238.88:<0.8897.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.435,ns_1@10.242.238.88:<0.8897.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.436,ns_1@10.242.238.88:<0.5888.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 955 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.438,ns_1@10.242.238.88:<0.5896.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.441,ns_1@10.242.238.88:<0.5896.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_955_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:35.446,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.448,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{958, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.448,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.448,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.450,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:35.455,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/409. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.456,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",409,active,0} [rebalance:info,2014-08-19T16:50:35.456,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 958 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 958) [ns_server:debug,2014-08-19T16:50:35.458,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.458,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 955 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:35.458,ns_1@10.242.238.88:<0.8917.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 955 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:35.477,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.477,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.478,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{955, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.478,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.479,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.488,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 955 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.489,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 955) [ns_server:debug,2014-08-19T16:50:35.490,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.527,ns_1@10.242.238.88:<0.8927.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 957) [rebalance:info,2014-08-19T16:50:35.528,ns_1@10.242.238.88:<0.5731.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:35.531,ns_1@10.242.238.88:<0.5739.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_957_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.531,ns_1@10.242.238.88:<0.5731.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:35.534,ns_1@10.242.238.88:<0.5731.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 957 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8930.1> [ns_server:info,2014-08-19T16:50:35.535,ns_1@10.242.238.88:<0.8930.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 957 to state replica [ns_server:debug,2014-08-19T16:50:35.564,ns_1@10.242.238.88:<0.8930.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_957 [rebalance:info,2014-08-19T16:50:35.565,ns_1@10.242.238.88:<0.8930.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[957]}, {checkpoints,[{957,1}]}, {name,<<"rebalance_957">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[957]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"957"}]} [rebalance:debug,2014-08-19T16:50:35.566,ns_1@10.242.238.88:<0.8930.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8945.1> [rebalance:info,2014-08-19T16:50:35.567,ns_1@10.242.238.88:<0.8930.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.568,ns_1@10.242.238.88:<0.8930.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.568,ns_1@10.242.238.88:<0.8930.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.569,ns_1@10.242.238.88:<0.5731.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 957 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.571,ns_1@10.242.238.88:<0.5739.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.575,ns_1@10.242.238.88:<0.5739.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_957_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.575,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 957 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:35.575,ns_1@10.242.238.88:<0.8949.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 957 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:35.579,ns_1@10.242.238.88:<0.8950.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 959) [rebalance:info,2014-08-19T16:50:35.580,ns_1@10.242.238.88:<0.5585.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:35.583,ns_1@10.242.238.88:<0.5598.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_959_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.583,ns_1@10.242.238.88:<0.5585.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:35.586,ns_1@10.242.238.88:<0.5585.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 959 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.8953.1> [ns_server:info,2014-08-19T16:50:35.586,ns_1@10.242.238.88:<0.8953.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 959 to state replica [ns_server:debug,2014-08-19T16:50:35.593,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.594,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.594,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{957, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.594,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.595,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.601,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 957 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.602,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 957) [ns_server:debug,2014-08-19T16:50:35.602,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:35.615,ns_1@10.242.238.88:<0.8953.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_959 [rebalance:info,2014-08-19T16:50:35.617,ns_1@10.242.238.88:<0.8953.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[959]}, {checkpoints,[{959,1}]}, {name,<<"rebalance_959">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[959]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"959"}]} [rebalance:debug,2014-08-19T16:50:35.618,ns_1@10.242.238.88:<0.8953.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.8963.1> [rebalance:info,2014-08-19T16:50:35.619,ns_1@10.242.238.88:<0.8953.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.620,ns_1@10.242.238.88:<0.8953.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.620,ns_1@10.242.238.88:<0.8953.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.622,ns_1@10.242.238.88:<0.5585.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 959 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:35.622,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 407. Nacking mccouch update. [views:debug,2014-08-19T16:50:35.622,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/407. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.623,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",407,active,0} [rebalance:debug,2014-08-19T16:50:35.623,ns_1@10.242.238.88:<0.5598.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.625,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951, 823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,222,950,822,767,639,456,328,873,690,562, 507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587, 404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429, 246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454,326, 999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896, 768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793, 738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818,763, 635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532, 477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502, 374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836, 653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859, 676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573, 390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598, 415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312, 985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882, 699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176, 
776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826, 515,460,694,1005,928,617,306] [ns_server:info,2014-08-19T16:50:35.627,ns_1@10.242.238.88:<0.5598.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_959_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.627,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 959 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:35.627,ns_1@10.242.238.88:<0.8967.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 959 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:35.645,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.645,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.646,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.646,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{959, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.647,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.653,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 959 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.654,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 959) [ns_server:debug,2014-08-19T16:50:35.654,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8977.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 441) [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8978.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 432) [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8979.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 428) [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8980.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 430) [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8981.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 434) [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8984.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 427) [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8983.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 438) [rebalance:info,2014-08-19T16:50:35.696,ns_1@10.242.238.88:<0.8982.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 436) [rebalance:info,2014-08-19T16:50:35.697,ns_1@10.242.238.88:<0.8986.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 684) [rebalance:info,2014-08-19T16:50:35.697,ns_1@10.242.238.88:<0.8987.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 686) [rebalance:info,2014-08-19T16:50:35.697,ns_1@10.242.238.88:<0.8988.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 429) [rebalance:info,2014-08-19T16:50:35.697,ns_1@10.242.238.88:<0.8985.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 431) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.7155.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8989.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 433) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8990.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 688) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8991.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 692) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8992.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 435) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.6843.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8993.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 694) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8994.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 437) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.7134.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8995.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 690) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.6976.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.6098.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8996.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 439) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8998.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 683) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8997.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 685) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.6689.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.6329.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.7001.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.8999.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 940) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.9000.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 938) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.9001.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 689) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.9002.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 944) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.6515.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.9003.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 942) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.9004.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 687) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.7232.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.9005.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 946) [rebalance:info,2014-08-19T16:50:35.698,ns_1@10.242.238.88:<0.9006.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 691) [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.9007.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 948) [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.9008.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 693) [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.9009.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 695) [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.6494.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.9010.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 950) [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.7078.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.6920.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.9011.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 943) [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.9012.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 941) [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.6822.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.699,ns_1@10.242.238.88:<0.9013.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 939) [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.9014.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 951) [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.9016.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 949) [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.6308.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.9015.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 945) [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.6766.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.6654.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.9017.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 947) [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.7197.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.7099.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.6597.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.7253.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.700,ns_1@10.242.238.88:<0.7057.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6787.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6443.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6745.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6878.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6899.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6266.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6955.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6287.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.701,ns_1@10.242.238.88:<0.6576.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.702,ns_1@10.242.238.88:<0.6385.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.702,ns_1@10.242.238.88:<0.6632.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.702,ns_1@10.242.238.88:<0.6473.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.702,ns_1@10.242.238.88:<0.6231.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:35.702,ns_1@10.242.238.88:<0.7022.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.702,ns_1@10.242.238.88:<0.7176.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.703,ns_1@10.242.238.88:<0.6210.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.703,ns_1@10.242.238.88:<0.6364.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:35.703,ns_1@10.242.238.88:<0.7163.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_428_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.703,ns_1@10.242.238.88:<0.6710.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:35.703,ns_1@10.242.238.88:<0.7155.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:35.703,ns_1@10.242.238.88:<0.6555.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [views:debug,2014-08-19T16:50:35.706,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/407. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.706,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",407,active,0} [ns_server:info,2014-08-19T16:50:35.707,ns_1@10.242.238.88:<0.7142.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_684_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.707,ns_1@10.242.238.88:<0.7134.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.709,ns_1@10.242.238.88:<0.6984.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_686_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.709,ns_1@10.242.238.88:<0.6976.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.713,ns_1@10.242.238.88:<0.6106.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_441_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.713,ns_1@10.242.238.88:<0.6098.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.716,ns_1@10.242.238.88:<0.6851.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_432_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.716,ns_1@10.242.238.88:<0.6843.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.718,ns_1@10.242.238.88:<0.6342.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_438_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.718,ns_1@10.242.238.88:<0.6329.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.719,ns_1@10.242.238.88:<0.6697.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_434_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.719,ns_1@10.242.238.88:<0.6689.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.720,ns_1@10.242.238.88:<0.6502.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_692_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.721,ns_1@10.242.238.88:<0.6494.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.721,ns_1@10.242.238.88:<0.7009.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_430_'ns_1@10.242.238.89'">>] 
[rebalance:info,2014-08-19T16:50:35.721,ns_1@10.242.238.88:<0.7001.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.721,ns_1@10.242.238.88:<0.7240.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_427_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.722,ns_1@10.242.238.88:<0.7232.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.722,ns_1@10.242.238.88:<0.6523.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_436_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.722,ns_1@10.242.238.88:<0.6515.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.722,ns_1@10.242.238.88:<0.6928.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_431_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.722,ns_1@10.242.238.88:<0.6920.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.722,ns_1@10.242.238.88:<0.6830.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_688_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.723,ns_1@10.242.238.88:<0.6822.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.723,ns_1@10.242.238.88:<0.7086.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_429_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.723,ns_1@10.242.238.88:<0.7078.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.723,ns_1@10.242.238.88:<0.6774.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_433_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.723,ns_1@10.242.238.88:<0.6766.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.724,ns_1@10.242.238.88:<0.6605.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_435_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.724,ns_1@10.242.238.88:<0.6597.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.725,ns_1@10.242.238.88:<0.6456.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_437_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.725,ns_1@10.242.238.88:<0.6443.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.725,ns_1@10.242.238.88:<0.6316.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_694_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.725,ns_1@10.242.238.88:<0.6308.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:50:35.725,ns_1@10.242.238.88:<0.7205.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_683_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.725,ns_1@10.242.238.88:<0.7197.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.726,ns_1@10.242.238.88:<0.7107.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_940_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.726,ns_1@10.242.238.88:<0.7099.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.726,ns_1@10.242.238.88:<0.6662.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_690_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.726,ns_1@10.242.238.88:<0.6654.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.727,ns_1@10.242.238.88:<0.7065.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_685_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.727,ns_1@10.242.238.88:<0.7057.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.727,ns_1@10.242.238.88:<0.6274.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_439_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:35.727,ns_1@10.242.238.88:<0.6266.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.727,ns_1@10.242.238.88:<0.6753.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_689_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.728,ns_1@10.242.238.88:<0.6745.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.730,ns_1@10.242.238.88:<0.6907.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_687_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.730,ns_1@10.242.238.88:<0.6899.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.730,ns_1@10.242.238.88:<0.7261.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_938_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.730,ns_1@10.242.238.88:<0.7253.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.730,ns_1@10.242.238.88:<0.6886.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_943_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.731,ns_1@10.242.238.88:<0.6878.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.731,ns_1@10.242.238.88:<0.6795.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_944_'ns_1@10.242.238.91'">>] 
[rebalance:info,2014-08-19T16:50:35.731,ns_1@10.242.238.88:<0.6787.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.731,ns_1@10.242.238.88:<0.6295.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_950_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.731,ns_1@10.242.238.88:<0.6287.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.732,ns_1@10.242.238.88:<0.6584.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_691_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.732,ns_1@10.242.238.88:<0.6576.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.732,ns_1@10.242.238.88:<0.6393.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_693_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.733,ns_1@10.242.238.88:<0.6385.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.733,ns_1@10.242.238.88:<0.6963.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_942_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.733,ns_1@10.242.238.88:<0.6955.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.733,ns_1@10.242.238.88:<0.6640.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_946_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:35.733,ns_1@10.242.238.88:<0.6239.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_695_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.733,ns_1@10.242.238.88:<0.6632.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:35.734,ns_1@10.242.238.88:<0.6231.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.734,ns_1@10.242.238.88:<0.6481.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_948_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.734,ns_1@10.242.238.88:<0.6473.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.734,ns_1@10.242.238.88:<0.7030.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_941_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.734,ns_1@10.242.238.88:<0.7022.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.735,ns_1@10.242.238.88:<0.6218.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_951_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:35.735,ns_1@10.242.238.88:<0.7184.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_939_'ns_1@10.242.238.91'">>] 
[rebalance:info,2014-08-19T16:50:35.735,ns_1@10.242.238.88:<0.6210.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:35.735,ns_1@10.242.238.88:<0.7176.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.736,ns_1@10.242.238.88:<0.6718.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_945_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.736,ns_1@10.242.238.88:<0.6710.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:35.737,ns_1@10.242.238.88:<0.6372.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_949_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:35.737,ns_1@10.242.238.88:<0.6563.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_947_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.737,ns_1@10.242.238.88:<0.6555.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:35.737,ns_1@10.242.238.88:<0.6364.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:35.750,ns_1@10.242.238.88:<0.7155.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 428 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9092.1> [ns_server:info,2014-08-19T16:50:35.751,ns_1@10.242.238.88:<0.9092.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 428 to state replica [ns_server:debug,2014-08-19T16:50:35.757,ns_1@10.242.238.88:<0.7134.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 684 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9101.1> [ns_server:info,2014-08-19T16:50:35.757,ns_1@10.242.238.88:<0.9101.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 684 to state replica [ns_server:debug,2014-08-19T16:50:35.758,ns_1@10.242.238.88:<0.6976.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 686 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9102.1> [ns_server:info,2014-08-19T16:50:35.761,ns_1@10.242.238.88:<0.9102.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 686 to state replica [ns_server:debug,2014-08-19T16:50:35.771,ns_1@10.242.238.88:<0.6098.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 441 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9103.1> [ns_server:info,2014-08-19T16:50:35.772,ns_1@10.242.238.88:<0.9103.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 441 to state replica [ns_server:debug,2014-08-19T16:50:35.778,ns_1@10.242.238.88:<0.6843.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 432 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9104.1> [ns_server:info,2014-08-19T16:50:35.779,ns_1@10.242.238.88:<0.9104.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 432 to state replica [ns_server:debug,2014-08-19T16:50:35.788,ns_1@10.242.238.88:<0.6329.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 438 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9105.1> 
[ns_server:debug,2014-08-19T16:50:35.789,ns_1@10.242.238.88:<0.6689.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 434 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9106.1> [ns_server:info,2014-08-19T16:50:35.791,ns_1@10.242.238.88:<0.9105.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 438 to state replica [ns_server:info,2014-08-19T16:50:35.791,ns_1@10.242.238.88:<0.9106.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 434 to state replica [ns_server:debug,2014-08-19T16:50:35.795,ns_1@10.242.238.88:<0.9092.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_428 [ns_server:debug,2014-08-19T16:50:35.796,ns_1@10.242.238.88:<0.6494.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 692 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9107.1> [ns_server:debug,2014-08-19T16:50:35.797,ns_1@10.242.238.88:<0.7001.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 430 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9108.1> [ns_server:debug,2014-08-19T16:50:35.797,ns_1@10.242.238.88:<0.7232.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 427 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9109.1> [ns_server:debug,2014-08-19T16:50:35.803,ns_1@10.242.238.88:<0.6515.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 436 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9110.1> [ns_server:debug,2014-08-19T16:50:35.803,ns_1@10.242.238.88:<0.6822.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 688 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9112.1> [ns_server:debug,2014-08-19T16:50:35.803,ns_1@10.242.238.88:<0.6920.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 431 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9111.1> [ns_server:debug,2014-08-19T16:50:35.803,ns_1@10.242.238.88:<0.6597.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 435 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9114.1> [ns_server:debug,2014-08-19T16:50:35.803,ns_1@10.242.238.88:<0.7078.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 429 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9113.1> [ns_server:debug,2014-08-19T16:50:35.803,ns_1@10.242.238.88:<0.6443.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 437 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9115.1> [ns_server:debug,2014-08-19T16:50:35.803,ns_1@10.242.238.88:<0.7099.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 940 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9117.1> [ns_server:info,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.9110.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 436 to state replica [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.6766.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 433 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9116.1> [ns_server:info,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.9109.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 427 to state replica [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.7057.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 685 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9118.1> 
[ns_server:info,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.9108.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 430 to state replica [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.6308.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 694 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9119.1> [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.7197.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 683 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9120.1> [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.6654.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 690 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9121.1> [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.6745.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 689 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9122.1> [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.6899.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 687 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9124.1> [ns_server:debug,2014-08-19T16:50:35.804,ns_1@10.242.238.88:<0.7253.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 938 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9123.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6266.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 439 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.9125.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6878.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 943 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9126.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6576.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 691 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9127.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6287.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 950 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9128.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6955.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 942 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9129.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6385.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 693 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9130.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6787.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 944 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9131.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6632.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 946 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9132.1> [ns_server:debug,2014-08-19T16:50:35.805,ns_1@10.242.238.88:<0.6231.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 695 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.9133.1> [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.88:<0.7022.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 941 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9140.1> 
[ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.88:<0.6473.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 948 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9141.1> [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.88:<0.6210.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 951 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9142.1> [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.88:<0.7176.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 939 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9143.1> [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.88:<0.6555.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 947 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9146.1> [ns_server:debug,2014-08-19T16:50:35.806,ns_1@10.242.238.88:<0.6364.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 949 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9148.1> [ns_server:debug,2014-08-19T16:50:35.807,ns_1@10.242.238.88:<0.6710.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 945 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.9147.1> [ns_server:info,2014-08-19T16:50:35.808,ns_1@10.242.238.88:<0.9107.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 692 to state replica [ns_server:debug,2014-08-19T16:50:35.809,ns_1@10.242.238.88:<0.9101.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_684 [rebalance:info,2014-08-19T16:50:35.808,ns_1@10.242.238.88:<0.9092.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[428]}, {checkpoints,[{428,1}]}, {name,<<"rebalance_428">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[428]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"428"}]} [ns_server:info,2014-08-19T16:50:35.809,ns_1@10.242.238.88:<0.9115.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 437 to state replica [ns_server:info,2014-08-19T16:50:35.809,ns_1@10.242.238.88:<0.9111.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 431 to state replica [ns_server:info,2014-08-19T16:50:35.809,ns_1@10.242.238.88:<0.9113.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 429 to state replica [ns_server:info,2014-08-19T16:50:35.809,ns_1@10.242.238.88:<0.9112.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 688 to state replica [ns_server:info,2014-08-19T16:50:35.809,ns_1@10.242.238.88:<0.9114.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 435 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9148.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 949 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9123.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 938 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9140.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 941 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9128.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 950 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9146.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 947 to 
state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9143.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 939 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9117.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 940 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9142.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 951 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9147.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 945 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9141.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 948 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9131.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 944 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9126.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 943 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9118.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 685 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9127.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 691 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9124.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 687 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9116.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 433 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9122.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 689 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9130.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 693 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9129.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 942 to state replica [ns_server:info,2014-08-19T16:50:35.811,ns_1@10.242.238.88:<0.9120.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 683 to state replica [ns_server:info,2014-08-19T16:50:35.812,ns_1@10.242.238.88:<0.9121.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 690 to state replica [rebalance:debug,2014-08-19T16:50:35.812,ns_1@10.242.238.88:<0.9092.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9155.1> [ns_server:info,2014-08-19T16:50:35.812,ns_1@10.242.238.88:<0.9119.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 694 to state replica [ns_server:info,2014-08-19T16:50:35.812,ns_1@10.242.238.88:<0.9133.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 695 to state replica [ns_server:info,2014-08-19T16:50:35.812,ns_1@10.242.238.88:<0.9132.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 946 to state replica [ns_server:info,2014-08-19T16:50:35.812,ns_1@10.242.238.88:<0.9125.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 439 to state replica [rebalance:info,2014-08-19T16:50:35.812,ns_1@10.242.238.88:<0.9101.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[684]}, {checkpoints,[{684,1}]}, {name,<<"rebalance_684">>}, {takeover,true}] 
{{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[684]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"684"}]} [rebalance:debug,2014-08-19T16:50:35.813,ns_1@10.242.238.88:<0.9101.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9156.1> [rebalance:info,2014-08-19T16:50:35.817,ns_1@10.242.238.88:<0.9092.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:35.820,ns_1@10.242.238.88:<0.9102.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_686 [rebalance:debug,2014-08-19T16:50:35.822,ns_1@10.242.238.88:<0.9092.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.822,ns_1@10.242.238.88:<0.9101.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:35.822,ns_1@10.242.238.88:<0.9092.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.823,ns_1@10.242.238.88:<0.9102.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[686]}, {checkpoints,[{686,1}]}, {name,<<"rebalance_686">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[686]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"686"}]} [rebalance:info,2014-08-19T16:50:35.824,ns_1@10.242.238.88:<0.7155.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 428 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.824,ns_1@10.242.238.88:<0.9101.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.824,ns_1@10.242.238.88:<0.9101.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:50:35.824,ns_1@10.242.238.88:<0.9102.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9157.1> [rebalance:debug,2014-08-19T16:50:35.825,ns_1@10.242.238.88:<0.7163.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:35.825,ns_1@10.242.238.88:<0.9102.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:35.825,ns_1@10.242.238.88:<0.7134.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 684 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.827,ns_1@10.242.238.88:<0.9102.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:debug,2014-08-19T16:50:35.827,ns_1@10.242.238.88:<0.7142.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:35.827,ns_1@10.242.238.88:<0.9102.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:35.828,ns_1@10.242.238.88:<0.9103.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_441 [rebalance:info,2014-08-19T16:50:35.828,ns_1@10.242.238.88:<0.6976.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 686 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:35.829,ns_1@10.242.238.88:<0.7163.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_428_'ns_1@10.242.238.91'">>] 
[rebalance:info,2014-08-19T16:50:35.829,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 428 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:35.829,ns_1@10.242.238.88:<0.9161.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 428 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:50:35.829,ns_1@10.242.238.88:<0.9103.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[441]}, {checkpoints,[{441,1}]}, {name,<<"rebalance_441">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[441]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"441"}]} [rebalance:debug,2014-08-19T16:50:35.830,ns_1@10.242.238.88:<0.6984.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:35.830,ns_1@10.242.238.88:<0.9103.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9163.1> [ns_server:info,2014-08-19T16:50:35.831,ns_1@10.242.238.88:<0.7142.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_684_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.831,ns_1@10.242.238.88:<0.9103.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.833,ns_1@10.242.238.88:<0.9103.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.833,ns_1@10.242.238.88:<0.9103.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:35.834,ns_1@10.242.238.88:<0.6984.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_686_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.834,ns_1@10.242.238.88:<0.6098.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 441 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.836,ns_1@10.242.238.88:<0.6106.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.842,ns_1@10.242.238.88:<0.9105.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_438 [ns_server:info,2014-08-19T16:50:35.843,ns_1@10.242.238.88:<0.6106.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_441_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.843,ns_1@10.242.238.88:<0.9105.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[438]}, {checkpoints,[{438,1}]}, {name,<<"rebalance_438">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[438]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"438"}]} [rebalance:debug,2014-08-19T16:50:35.844,ns_1@10.242.238.88:<0.9105.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9169.1> [rebalance:info,2014-08-19T16:50:35.845,ns_1@10.242.238.88:<0.9105.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.846,ns_1@10.242.238.88:<0.9105.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:info,2014-08-19T16:50:35.847,ns_1@10.242.238.88:<0.9105.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.847,ns_1@10.242.238.88:<0.6329.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 438 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:35.848,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:35.849,ns_1@10.242.238.88:<0.6342.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.850,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{428, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.851,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.852,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.852,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:35.854,ns_1@10.242.238.88:<0.6342.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_438_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:35.854,ns_1@10.242.238.88:<0.9104.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_432 [rebalance:info,2014-08-19T16:50:35.855,ns_1@10.242.238.88:<0.9104.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[432]}, {checkpoints,[{432,1}]}, {name,<<"rebalance_432">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[432]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"432"}]} [rebalance:debug,2014-08-19T16:50:35.856,ns_1@10.242.238.88:<0.9104.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9180.1> [rebalance:info,2014-08-19T16:50:35.856,ns_1@10.242.238.88:<0.9104.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.859,ns_1@10.242.238.88:<0.9104.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.859,ns_1@10.242.238.88:<0.9104.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.859,ns_1@10.242.238.88:<0.6843.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 432 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:35.860,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 428 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.861,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 428) [rebalance:debug,2014-08-19T16:50:35.861,ns_1@10.242.238.88:<0.6851.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.861,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.862,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 684 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:35.862,ns_1@10.242.238.88:<0.9183.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 684 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:35.864,ns_1@10.242.238.88:<0.6851.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_432_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:35.870,ns_1@10.242.238.88:<0.9106.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_434 [ns_server:debug,2014-08-19T16:50:35.876,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.877,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.878,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{684, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.879,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.881,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.884,ns_1@10.242.238.88:<0.9106.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[434]}, {checkpoints,[{434,1}]}, {name,<<"rebalance_434">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[434]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"434"}]} [rebalance:debug,2014-08-19T16:50:35.884,ns_1@10.242.238.88:<0.9106.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9194.1> [rebalance:info,2014-08-19T16:50:35.885,ns_1@10.242.238.88:<0.9106.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.887,ns_1@10.242.238.88:<0.9106.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.887,ns_1@10.242.238.88:<0.9106.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:50:35.888,ns_1@10.242.238.88:<0.6689.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 434 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:35.890,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 405. Nacking mccouch update. [rebalance:debug,2014-08-19T16:50:35.890,ns_1@10.242.238.88:<0.6697.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [views:debug,2014-08-19T16:50:35.890,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/405. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.890,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",405,active,0} [rebalance:info,2014-08-19T16:50:35.891,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 684 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.891,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 684) [ns_server:debug,2014-08-19T16:50:35.892,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.892,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 686 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:35.892,ns_1@10.242.238.88:<0.9197.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 686 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:35.892,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951, 823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,220,948,820,765,637,454, 326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168, 896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921, 793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946,818, 763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660, 532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557, 502,374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,216, 1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296, 969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994, 
866,683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891, 708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678, 550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392, 264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417, 234,962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440, 312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154, 882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464, 698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748, 437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487, 176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226, 826,515,460,694,1005,928,617,306] [ns_server:info,2014-08-19T16:50:35.893,ns_1@10.242.238.88:<0.6697.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_434_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:35.893,ns_1@10.242.238.88:<0.9128.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_950 [rebalance:info,2014-08-19T16:50:35.894,ns_1@10.242.238.88:<0.9128.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[950]}, {checkpoints,[{950,1}]}, {name,<<"rebalance_950">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[950]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"950"}]} [rebalance:debug,2014-08-19T16:50:35.895,ns_1@10.242.238.88:<0.9128.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9200.1> [rebalance:info,2014-08-19T16:50:35.895,ns_1@10.242.238.88:<0.9128.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.897,ns_1@10.242.238.88:<0.9128.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.897,ns_1@10.242.238.88:<0.9128.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.899,ns_1@10.242.238.88:<0.6287.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 950 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.908,ns_1@10.242.238.88:<0.6295.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.908,ns_1@10.242.238.88:<0.9148.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_949 [ns_server:debug,2014-08-19T16:50:35.909,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:35.910,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.910,ns_1@10.242.238.88:<0.9148.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[949]}, {checkpoints,[{949,1}]}, {name,<<"rebalance_949">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[949]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"949"}]} [ns_server:debug,2014-08-19T16:50:35.910,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.910,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{686, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:35.911,ns_1@10.242.238.88:<0.9148.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9203.1> [ns_server:debug,2014-08-19T16:50:35.911,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:35.912,ns_1@10.242.238.88:<0.9148.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:35.912,ns_1@10.242.238.88:<0.6295.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_950_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:50:35.914,ns_1@10.242.238.88:<0.9148.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.914,ns_1@10.242.238.88:<0.9148.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.916,ns_1@10.242.238.88:<0.6364.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 949 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.917,ns_1@10.242.238.88:<0.6372.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:35.920,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 686 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:info,2014-08-19T16:50:35.920,ns_1@10.242.238.88:<0.6372.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_949_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:35.921,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 686) [ns_server:debug,2014-08-19T16:50:35.921,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.921,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 441 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:35.922,ns_1@10.242.238.88:<0.9216.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 441 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:35.922,ns_1@10.242.238.88:<0.9141.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_948 [rebalance:info,2014-08-19T16:50:35.923,ns_1@10.242.238.88:<0.9141.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[948]}, {checkpoints,[{948,1}]}, {name,<<"rebalance_948">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[948]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"948"}]} [rebalance:debug,2014-08-19T16:50:35.924,ns_1@10.242.238.88:<0.9141.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9217.1> [rebalance:info,2014-08-19T16:50:35.925,ns_1@10.242.238.88:<0.9141.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.927,ns_1@10.242.238.88:<0.9141.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.927,ns_1@10.242.238.88:<0.9141.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.928,ns_1@10.242.238.88:<0.6473.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 948 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.934,ns_1@10.242.238.88:<0.6481.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.934,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.934,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.934,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:35.935,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.935,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{441, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.937,ns_1@10.242.238.88:<0.9125.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_439 [ns_server:info,2014-08-19T16:50:35.938,ns_1@10.242.238.88:<0.6481.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_948_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:35.939,ns_1@10.242.238.88:<0.9125.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[439]}, {checkpoints,[{439,1}]}, {name,<<"rebalance_439">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[439]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"439"}]} [rebalance:debug,2014-08-19T16:50:35.939,ns_1@10.242.238.88:<0.9125.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9227.1> [rebalance:info,2014-08-19T16:50:35.940,ns_1@10.242.238.88:<0.9125.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.942,ns_1@10.242.238.88:<0.9125.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.942,ns_1@10.242.238.88:<0.9125.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.943,ns_1@10.242.238.88:<0.6266.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 439 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:35.944,ns_1@10.242.238.88:<0.6274.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:35.947,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 441 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.947,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 441) [ns_server:info,2014-08-19T16:50:35.948,ns_1@10.242.238.88:<0.6274.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_439_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:35.948,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.948,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 438 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:35.948,ns_1@10.242.238.88:<0.9233.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 438 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [views:debug,2014-08-19T16:50:35.949,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/405. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:35.949,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",405,active,0} [ns_server:debug,2014-08-19T16:50:35.953,ns_1@10.242.238.88:<0.9147.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_945 [rebalance:info,2014-08-19T16:50:35.956,ns_1@10.242.238.88:<0.9147.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[945]}, {checkpoints,[{945,1}]}, {name,<<"rebalance_945">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[945]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"945"}]} [rebalance:debug,2014-08-19T16:50:35.957,ns_1@10.242.238.88:<0.9147.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9234.1> [rebalance:info,2014-08-19T16:50:35.958,ns_1@10.242.238.88:<0.9147.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.959,ns_1@10.242.238.88:<0.9147.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.959,ns_1@10.242.238.88:<0.9147.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.960,ns_1@10.242.238.88:<0.6710.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 945 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:35.960,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.962,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:35.962,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{438, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:35.962,ns_1@10.242.238.88:<0.6718.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.962,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.963,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:35.966,ns_1@10.242.238.88:<0.6718.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_945_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:35.967,ns_1@10.242.238.88:<0.9129.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_942 [rebalance:info,2014-08-19T16:50:35.970,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 438 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 438) [rebalance:info,2014-08-19T16:50:35.970,ns_1@10.242.238.88:<0.9129.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[942]}, {checkpoints,[{942,1}]}, {name,<<"rebalance_942">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[942]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"942"}]} [ns_server:debug,2014-08-19T16:50:35.971,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:50:35.971,ns_1@10.242.238.88:<0.9129.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9245.1> [rebalance:info,2014-08-19T16:50:35.971,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 432 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:35.971,ns_1@10.242.238.88:<0.9247.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 432 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:50:35.972,ns_1@10.242.238.88:<0.9129.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:35.973,ns_1@10.242.238.88:<0.9129.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.973,ns_1@10.242.238.88:<0.9129.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.974,ns_1@10.242.238.88:<0.6955.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 942 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
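The recurring "config change: buckets" records above list, under map, only the chains that changed, apparently as {VBucket, OldChain, NewChain}, with undefined marking an empty replica slot. A minimal Python sketch of how one such entry can be read; the values are copied from the vbucket 438 record above, and treating the logged map as a diff of changed entries is an assumption, not something the record itself states:

# Read one {VBucket, OldChain, NewChain} entry from a "config change: buckets"
# record. Chains list the active node first, then the replicas; the Erlang
# atom 'undefined' (an empty replica slot) is represented as None here.
old_chain = ["ns_1@10.242.238.88", None]
new_chain = ["ns_1@10.242.238.89", "ns_1@10.242.238.91"]

def describe_chain_change(vbucket, old, new):
    """Summarise how the replication chain of a single vbucket changed."""
    return (f"vbucket {vbucket}: active {old[0]} -> {new[0]}, "
            f"replicas {old[1:]} -> {new[1:]}")

print(describe_chain_change(438, old_chain, new_chain))
# vbucket 438: active ns_1@10.242.238.88 -> ns_1@10.242.238.89,
#   replicas [None] -> ['ns_1@10.242.238.91']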
[ns_server:debug,2014-08-19T16:50:35.984,ns_1@10.242.238.88:<0.9127.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_691 [rebalance:info,2014-08-19T16:50:35.985,ns_1@10.242.238.88:<0.9127.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[691]}, {checkpoints,[{691,1}]}, {name,<<"rebalance_691">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[691]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"691"}]} [rebalance:debug,2014-08-19T16:50:35.986,ns_1@10.242.238.88:<0.9127.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9249.1> [rebalance:info,2014-08-19T16:50:35.987,ns_1@10.242.238.88:<0.9127.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:35.987,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:35.987,ns_1@10.242.238.88:<0.6963.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:35.988,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:35.988,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:35.988,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{432, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:35.988,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:35.989,ns_1@10.242.238.88:<0.9127.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:35.989,ns_1@10.242.238.88:<0.9127.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:35.990,ns_1@10.242.238.88:<0.6576.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 691 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:35.991,ns_1@10.242.238.88:<0.6963.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_942_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:50:35.991,ns_1@10.242.238.88:<0.6584.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:35.994,ns_1@10.242.238.88:<0.6584.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_691_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:35.994,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 432 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:35.995,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 432) [ns_server:debug,2014-08-19T16:50:35.996,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:35.996,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 434 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:35.997,ns_1@10.242.238.88:<0.9263.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 434 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:36.000,ns_1@10.242.238.88:<0.9118.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_685 [rebalance:info,2014-08-19T16:50:36.002,ns_1@10.242.238.88:<0.9118.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[685]}, {checkpoints,[{685,1}]}, {name,<<"rebalance_685">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[685]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"685"}]} [rebalance:debug,2014-08-19T16:50:36.003,ns_1@10.242.238.88:<0.9118.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9264.1> [rebalance:info,2014-08-19T16:50:36.004,ns_1@10.242.238.88:<0.9118.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.005,ns_1@10.242.238.88:<0.9118.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.005,ns_1@10.242.238.88:<0.9118.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.006,ns_1@10.242.238.88:<0.7057.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 685 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.008,ns_1@10.242.238.88:<0.7065.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.009,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.009,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:36.010,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.010,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{434, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.010,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:36.012,ns_1@10.242.238.88:<0.7065.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_685_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.014,ns_1@10.242.238.88:<0.9107.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_692 [rebalance:info,2014-08-19T16:50:36.015,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 434 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:36.016,ns_1@10.242.238.88:<0.9107.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[692]}, {checkpoints,[{692,1}]}, {name,<<"rebalance_692">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[692]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"692"}]} [ns_server:debug,2014-08-19T16:50:36.016,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 434) [rebalance:debug,2014-08-19T16:50:36.016,ns_1@10.242.238.88:<0.9107.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9276.1> [rebalance:info,2014-08-19T16:50:36.017,ns_1@10.242.238.88:<0.9107.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:36.017,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.017,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 950 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:debug,2014-08-19T16:50:36.018,ns_1@10.242.238.88:<0.9107.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.018,ns_1@10.242.238.88:<0.9278.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 950 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:36.018,ns_1@10.242.238.88:<0.9107.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.020,ns_1@10.242.238.88:<0.6494.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 692 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.029,ns_1@10.242.238.88:<0.9123.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_938 
[rebalance:info,2014-08-19T16:50:36.032,ns_1@10.242.238.88:<0.9123.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[938]}, {checkpoints,[{938,1}]}, {name,<<"rebalance_938">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[938]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"938"}]} [rebalance:debug,2014-08-19T16:50:36.033,ns_1@10.242.238.88:<0.9123.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9279.1> [rebalance:info,2014-08-19T16:50:36.033,ns_1@10.242.238.88:<0.9123.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.035,ns_1@10.242.238.88:<0.9123.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.036,ns_1@10.242.238.88:<0.9123.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.036,ns_1@10.242.238.88:<0.7253.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 938 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.038,ns_1@10.242.238.88:<0.7261.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.041,ns_1@10.242.238.88:<0.7261.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_938_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:36.043,ns_1@10.242.238.88:<0.9112.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_688 [rebalance:info,2014-08-19T16:50:36.044,ns_1@10.242.238.88:<0.9112.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[688]}, {checkpoints,[{688,1}]}, {name,<<"rebalance_688">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[688]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"688"}]} [rebalance:debug,2014-08-19T16:50:36.045,ns_1@10.242.238.88:<0.9112.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9297.1> [ns_server:debug,2014-08-19T16:50:36.045,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.046,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
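Each takeover move in this section produces a "Starting tap stream" record like the ones above: a per-vbucket option list ({vbuckets,[N]}, a single checkpoint, a rebalance_N name, takeover true) followed by the source and destination endpoints and the connection options. A small Python sketch that pulls the vbucket id and the two endpoints out of such text; it assumes only the record shape seen here (vbuckets first, then the {{Src,Port},{Dst,Port}} pair), nothing about other log formats:

import re

# Matches the "Starting tap stream" records emitted by ebucketmigrator_srv in
# this log: the vbucket id from {vbuckets,[N]} and the {{Src,Port},{Dst,Port}}
# endpoint pair that follows the option list.
TAP_STREAM = re.compile(
    r'Starting tap stream: \[\{vbuckets,\[(\d+)\]\}'
    r'.*?\{\{"([^"]+)",(\d+)\},\s*\{"([^"]+)",(\d+)\}',
    re.S)

def takeover_streams(log_text):
    """Yield (vbucket, source_host, destination_host) per takeover record."""
    for m in TAP_STREAM.finditer(log_text):
        vb, src, _src_port, dst, _dst_port = m.groups()
        yield int(vb), src, dst

# e.g. list(takeover_streams(text))
#   -> [(938, '10.242.238.88', '10.242.238.91'), (688, '10.242.238.88', '10.242.238.90'), ...]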
[ns_server:debug,2014-08-19T16:50:36.046,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:36.049,ns_1@10.242.238.88:<0.6502.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.049,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.049,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{950, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:36.052,ns_1@10.242.238.88:<0.9112.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.053,ns_1@10.242.238.88:<0.9112.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.054,ns_1@10.242.238.88:<0.9112.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:36.054,ns_1@10.242.238.88:<0.6502.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_692_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:36.056,ns_1@10.242.238.88:<0.6822.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 688 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.058,ns_1@10.242.238.88:<0.6830.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.060,ns_1@10.242.238.88:<0.6830.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_688_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:36.061,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 950 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.062,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 950) [ns_server:debug,2014-08-19T16:50:36.063,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.063,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 949 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.063,ns_1@10.242.238.88:<0.9311.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 949 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.065,ns_1@10.242.238.88:<0.9142.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_951 [rebalance:info,2014-08-19T16:50:36.066,ns_1@10.242.238.88:<0.9142.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[951]}, {checkpoints,[{951,1}]}, {name,<<"rebalance_951">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[951]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"951"}]} [rebalance:debug,2014-08-19T16:50:36.067,ns_1@10.242.238.88:<0.9142.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9312.1> [rebalance:info,2014-08-19T16:50:36.068,ns_1@10.242.238.88:<0.9142.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.069,ns_1@10.242.238.88:<0.9142.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.069,ns_1@10.242.238.88:<0.9142.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.070,ns_1@10.242.238.88:<0.6210.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 951 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.072,ns_1@10.242.238.88:<0.6218.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.076,ns_1@10.242.238.88:<0.9117.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_940 [ns_server:info,2014-08-19T16:50:36.077,ns_1@10.242.238.88:<0.6218.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_951_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:36.078,ns_1@10.242.238.88:<0.9117.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[940]}, {checkpoints,[{940,1}]}, {name,<<"rebalance_940">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[940]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"940"}]} [rebalance:debug,2014-08-19T16:50:36.078,ns_1@10.242.238.88:<0.9117.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9316.1> [rebalance:info,2014-08-19T16:50:36.080,ns_1@10.242.238.88:<0.9117.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:36.081,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:36.082,ns_1@10.242.238.88:<0.9117.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:info,2014-08-19T16:50:36.082,ns_1@10.242.238.88:<0.9117.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:36.082,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.082,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.082,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.082,ns_1@10.242.238.88:<0.7099.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 940 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.083,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{949, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:36.084,ns_1@10.242.238.88:<0.7107.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.087,ns_1@10.242.238.88:<0.7107.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_940_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:36.087,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 949 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.088,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 949) [ns_server:debug,2014-08-19T16:50:36.089,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.089,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 948 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.089,ns_1@10.242.238.88:<0.9328.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 948 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.091,ns_1@10.242.238.88:<0.9132.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_946 [rebalance:info,2014-08-19T16:50:36.093,ns_1@10.242.238.88:<0.9132.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[946]}, {checkpoints,[{946,1}]}, {name,<<"rebalance_946">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[946]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"946"}]} [rebalance:debug,2014-08-19T16:50:36.094,ns_1@10.242.238.88:<0.9132.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9329.1> [rebalance:info,2014-08-19T16:50:36.095,ns_1@10.242.238.88:<0.9132.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.096,ns_1@10.242.238.88:<0.9132.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.096,ns_1@10.242.238.88:<0.9132.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.097,ns_1@10.242.238.88:<0.6632.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 946 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.099,ns_1@10.242.238.88:<0.6640.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.103,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 403. Nacking mccouch update. [views:debug,2014-08-19T16:50:36.103,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/403. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.103,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",403,active,0} [ns_server:info,2014-08-19T16:50:36.103,ns_1@10.242.238.88:<0.6640.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_946_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:36.106,ns_1@10.242.238.88:<0.9114.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_435 [rebalance:info,2014-08-19T16:50:36.107,ns_1@10.242.238.88:<0.9114.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[435]}, {checkpoints,[{435,1}]}, {name,<<"rebalance_435">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[435]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"435"}]} [ns_server:debug,2014-08-19T16:50:36.106,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951, 823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637, 454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479, 168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376, 921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,218,1023,946, 818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843, 660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685, 557,502,374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582, 216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424, 296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138, 994,866,683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346, 891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788, 733,605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758, 630,447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655, 527,472,344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861, 678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575, 392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600, 
417,234,962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314, 987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884, 701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781, 726,598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623, 440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519, 464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803, 748,437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542, 487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592, 226,826,515,460,694,1005,928,617,306] [ns_server:debug,2014-08-19T16:50:36.108,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:debug,2014-08-19T16:50:36.108,ns_1@10.242.238.88:<0.9114.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9334.1> [ns_server:debug,2014-08-19T16:50:36.108,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.108,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{948, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.108,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.109,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.109,ns_1@10.242.238.88:<0.9114.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.111,ns_1@10.242.238.88:<0.9114.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.111,ns_1@10.242.238.88:<0.9114.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.113,ns_1@10.242.238.88:<0.6597.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 435 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.115,ns_1@10.242.238.88:<0.6605.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.118,ns_1@10.242.238.88:<0.6605.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_435_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.119,ns_1@10.242.238.88:<0.9130.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_693 [rebalance:info,2014-08-19T16:50:36.120,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 948 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.125,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 948) [rebalance:info,2014-08-19T16:50:36.125,ns_1@10.242.238.88:<0.9130.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[693]}, {checkpoints,[{693,1}]}, {name,<<"rebalance_693">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[693]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"693"}]} [ns_server:debug,2014-08-19T16:50:36.125,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.126,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 439 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.126,ns_1@10.242.238.88:<0.9345.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 439 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:36.126,ns_1@10.242.238.88:<0.9130.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9346.1> [rebalance:info,2014-08-19T16:50:36.127,ns_1@10.242.238.88:<0.9130.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.128,ns_1@10.242.238.88:<0.9130.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.128,ns_1@10.242.238.88:<0.9130.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.129,ns_1@10.242.238.88:<0.6385.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 693 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.131,ns_1@10.242.238.88:<0.6393.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.134,ns_1@10.242.238.88:<0.9140.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_941 [rebalance:info,2014-08-19T16:50:36.136,ns_1@10.242.238.88:<0.9140.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[941]}, {checkpoints,[{941,1}]}, {name,<<"rebalance_941">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[941]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"941"}]} [views:debug,2014-08-19T16:50:36.137,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/403. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.137,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",403,active,0} [rebalance:debug,2014-08-19T16:50:36.137,ns_1@10.242.238.88:<0.9140.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9347.1> [ns_server:info,2014-08-19T16:50:36.138,ns_1@10.242.238.88:<0.6393.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_693_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:36.139,ns_1@10.242.238.88:<0.9140.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.140,ns_1@10.242.238.88:<0.9140.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.140,ns_1@10.242.238.88:<0.9140.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.141,ns_1@10.242.238.88:<0.7022.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 941 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.142,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:36.143,ns_1@10.242.238.88:<0.7030.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.143,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.144,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{439, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:36.146,ns_1@10.242.238.88:<0.7030.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_941_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:36.150,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 439 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.150,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 439) [ns_server:debug,2014-08-19T16:50:36.151,ns_1@10.242.238.88:<0.9120.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_683 [ns_server:debug,2014-08-19T16:50:36.151,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.151,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 945 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.151,ns_1@10.242.238.88:<0.9362.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 945 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:36.152,ns_1@10.242.238.88:<0.9120.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[683]}, {checkpoints,[{683,1}]}, {name,<<"rebalance_683">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[683]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"683"}]} [rebalance:debug,2014-08-19T16:50:36.152,ns_1@10.242.238.88:<0.9120.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9363.1> [rebalance:info,2014-08-19T16:50:36.153,ns_1@10.242.238.88:<0.9120.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.155,ns_1@10.242.238.88:<0.9120.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.155,ns_1@10.242.238.88:<0.9120.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.156,ns_1@10.242.238.88:<0.7197.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 683 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.164,ns_1@10.242.238.88:<0.9133.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_695 [rebalance:info,2014-08-19T16:50:36.165,ns_1@10.242.238.88:<0.9133.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[695]}, {checkpoints,[{695,1}]}, {name,<<"rebalance_695">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[695]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"695"}]} [rebalance:debug,2014-08-19T16:50:36.166,ns_1@10.242.238.88:<0.9133.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9364.1> [rebalance:info,2014-08-19T16:50:36.167,ns_1@10.242.238.88:<0.9133.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.169,ns_1@10.242.238.88:<0.9133.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.169,ns_1@10.242.238.88:<0.9133.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.170,ns_1@10.242.238.88:<0.6231.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 695 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.170,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
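The same per-vbucket pattern repeats throughout this section: a "Doing vbucket N state change" on the future owner, a short takeover tap stream, the destination being set active, the replica builders shutting down, and finally "Moving vbucket N done". A rough Python sketch, with regexes written only against the record texts above, that groups those state changes and completion markers by vbucket so the interleaved records are easier to follow:

import re
from collections import defaultdict

STATE_CHANGE = re.compile(r"Doing vbucket (\d+) state change: \{'([^']+)',(\w+)")
MOVE_DONE = re.compile(r"Moving vbucket (\d+) done")

def summarise_moves(log_text):
    """Group 'state change' and 'move done' records by vbucket id."""
    changes = defaultdict(list)   # vbucket -> [(node, new_state), ...]
    completed = []                # vbuckets with a "Moving vbucket N done" record
    for m in STATE_CHANGE.finditer(log_text):
        changes[int(m.group(1))].append((m.group(2), m.group(3)))
    for m in MOVE_DONE.finditer(log_text):
        completed.append(int(m.group(1)))
    return changes, completed

# e.g. changes[439] -> [('ns_1@10.242.238.89', 'active'),
#                       ('ns_1@10.242.238.91', 'replica')]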
[rebalance:debug,2014-08-19T16:50:36.170,ns_1@10.242.238.88:<0.7205.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.171,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.172,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.172,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{945, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.172,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:36.174,ns_1@10.242.238.88:<0.7205.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_683_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.177,ns_1@10.242.238.88:<0.9131.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_944 [rebalance:info,2014-08-19T16:50:36.178,ns_1@10.242.238.88:<0.9131.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[944]}, {checkpoints,[{944,1}]}, {name,<<"rebalance_944">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[944]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"944"}]} [rebalance:debug,2014-08-19T16:50:36.179,ns_1@10.242.238.88:<0.9131.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9383.1> [rebalance:info,2014-08-19T16:50:36.180,ns_1@10.242.238.88:<0.9131.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.181,ns_1@10.242.238.88:<0.9131.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.181,ns_1@10.242.238.88:<0.9131.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.182,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 945 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:36.182,ns_1@10.242.238.88:<0.6787.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 944 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.182,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 945) [ns_server:debug,2014-08-19T16:50:36.183,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.183,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 942 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.183,ns_1@10.242.238.88:<0.9392.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 942 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:36.184,ns_1@10.242.238.88:<0.6795.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.191,ns_1@10.242.238.88:<0.9143.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_939 [rebalance:info,2014-08-19T16:50:36.192,ns_1@10.242.238.88:<0.9143.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[939]}, {checkpoints,[{939,1}]}, {name,<<"rebalance_939">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[939]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"939"}]} [rebalance:debug,2014-08-19T16:50:36.193,ns_1@10.242.238.88:<0.9143.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9393.1> [rebalance:info,2014-08-19T16:50:36.194,ns_1@10.242.238.88:<0.9143.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.195,ns_1@10.242.238.88:<0.6239.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:36.196,ns_1@10.242.238.88:<0.9143.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:50:36.196,ns_1@10.242.238.88:<0.6795.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_944_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:36.196,ns_1@10.242.238.88:<0.9143.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.197,ns_1@10.242.238.88:<0.7176.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 939 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:36.198,ns_1@10.242.238.88:<0.6239.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_695_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:50:36.199,ns_1@10.242.238.88:<0.7184.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.202,ns_1@10.242.238.88:<0.7184.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_939_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:36.207,ns_1@10.242.238.88:<0.9113.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_429 
[rebalance:info,2014-08-19T16:50:36.208,ns_1@10.242.238.88:<0.9113.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[429]}, {checkpoints,[{429,1}]}, {name,<<"rebalance_429">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[429]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"429"}]} [rebalance:debug,2014-08-19T16:50:36.208,ns_1@10.242.238.88:<0.9113.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9400.1> [rebalance:info,2014-08-19T16:50:36.209,ns_1@10.242.238.88:<0.9113.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.211,ns_1@10.242.238.88:<0.9113.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.211,ns_1@10.242.238.88:<0.9113.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.212,ns_1@10.242.238.88:<0.7078.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 429 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.213,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:36.214,ns_1@10.242.238.88:<0.7086.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.214,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{942, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.215,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.215,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.215,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:36.218,ns_1@10.242.238.88:<0.7086.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_429_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.219,ns_1@10.242.238.88:<0.9110.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_436 [rebalance:info,2014-08-19T16:50:36.221,ns_1@10.242.238.88:<0.9110.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[436]}, {checkpoints,[{436,1}]}, {name,<<"rebalance_436">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[436]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"436"}]} [ns_server:debug,2014-08-19T16:50:36.222,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 401. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:36.222,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/401. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.222,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",401,active,0} [rebalance:debug,2014-08-19T16:50:36.222,ns_1@10.242.238.88:<0.9110.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9411.1> [rebalance:info,2014-08-19T16:50:36.223,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 942 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:36.223,ns_1@10.242.238.88:<0.9110.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:36.223,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 942) [ns_server:debug,2014-08-19T16:50:36.224,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.224,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 691 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.225,ns_1@10.242.238.88:<0.9414.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 691 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.224,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,851,668,540,485,174,902,774,719,591,408,280,953,825,642,514,459, 148,876,693,565,510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484, 356,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003, 926,798,743,615,432,304,977,849,666,538,483,172,900,772,717,589,406,278,951, 823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,196,1001,924,796,741,613,430,302,975,847,664,536,481,170,898,770,715, 587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637, 454,326,999,871,688,560,505,194,922,794,739,611,428,300,973,845,662,534,479, 168,896,768,713,585,402,274,947,819,764,636,453,142,998,870,687,559,504,376, 921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,401,218,1023, 946,818,763,635,452,324,997,869,686,558,503,192,920,792,737,609,426,298,971, 843,660,532,477,166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868, 685,557,502,374,919,791,736,608,425,242,114,970,842,659,531,476,348,893,710, 582,216,1021,944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607, 424,296,969,841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449, 138,994,866,683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474, 346,891,708,580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916, 788,733,605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813, 
758,630,447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838, 655,527,472,344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552, 497,186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266, 1016,939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989, 861,678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442, 314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156, 884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909, 781,726,598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751, 623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520, 465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830, 519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853, 542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903, 592,226,826,515,460,694,1005,928,617,306] [rebalance:debug,2014-08-19T16:50:36.226,ns_1@10.242.238.88:<0.9110.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.226,ns_1@10.242.238.88:<0.9110.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.227,ns_1@10.242.238.88:<0.6515.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 436 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.229,ns_1@10.242.238.88:<0.6523.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.232,ns_1@10.242.238.88:<0.6523.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_436_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.235,ns_1@10.242.238.88:<0.9122.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_689 [rebalance:info,2014-08-19T16:50:36.236,ns_1@10.242.238.88:<0.9122.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[689]}, {checkpoints,[{689,1}]}, {name,<<"rebalance_689">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[689]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"689"}]} [rebalance:debug,2014-08-19T16:50:36.236,ns_1@10.242.238.88:<0.9122.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9417.1> [rebalance:info,2014-08-19T16:50:36.237,ns_1@10.242.238.88:<0.9122.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.238,ns_1@10.242.238.88:<0.9122.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.238,ns_1@10.242.238.88:<0.9122.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.239,ns_1@10.242.238.88:<0.6745.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 689 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.241,ns_1@10.242.238.88:<0.6753.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:info,2014-08-19T16:50:36.245,ns_1@10.242.238.88:<0.6753.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_689_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.246,ns_1@10.242.238.88:<0.9119.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_694 [ns_server:debug,2014-08-19T16:50:36.247,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.247,ns_1@10.242.238.88:<0.9119.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[694]}, {checkpoints,[{694,1}]}, {name,<<"rebalance_694">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[694]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"694"}]} [ns_server:debug,2014-08-19T16:50:36.248,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.248,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.248,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{691, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:36.248,ns_1@10.242.238.88:<0.9119.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9423.1> [ns_server:debug,2014-08-19T16:50:36.249,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.250,ns_1@10.242.238.88:<0.9119.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.252,ns_1@10.242.238.88:<0.9119.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.252,ns_1@10.242.238.88:<0.9119.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.253,ns_1@10.242.238.88:<0.6308.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 694 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.255,ns_1@10.242.238.88:<0.6316.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [views:debug,2014-08-19T16:50:36.256,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/401. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.256,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",401,active,0} [ns_server:info,2014-08-19T16:50:36.258,ns_1@10.242.238.88:<0.6316.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_694_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:36.258,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 691 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.260,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 691) [ns_server:debug,2014-08-19T16:50:36.260,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.260,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 685 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.261,ns_1@10.242.238.88:<0.9433.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 685 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.261,ns_1@10.242.238.88:<0.9108.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_430 [rebalance:info,2014-08-19T16:50:36.264,ns_1@10.242.238.88:<0.9108.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[430]}, {checkpoints,[{430,1}]}, {name,<<"rebalance_430">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[430]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"430"}]} [rebalance:debug,2014-08-19T16:50:36.265,ns_1@10.242.238.88:<0.9108.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9434.1> [rebalance:info,2014-08-19T16:50:36.265,ns_1@10.242.238.88:<0.9108.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.267,ns_1@10.242.238.88:<0.9108.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.267,ns_1@10.242.238.88:<0.9108.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.268,ns_1@10.242.238.88:<0.7001.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 430 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.269,ns_1@10.242.238.88:<0.7009.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.272,ns_1@10.242.238.88:<0.7009.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_430_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.274,ns_1@10.242.238.88:<0.9115.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_437 [rebalance:info,2014-08-19T16:50:36.276,ns_1@10.242.238.88:<0.9115.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[437]}, {checkpoints,[{437,1}]}, {name,<<"rebalance_437">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[437]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"437"}]} 
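(Illustrative aside, not part of the captured log.) The "Starting tap stream" records above all print the same three-part term: a source {Host,Port}, a destination {Host,Port}, and a proplist of takeover options. As a hedged sketch of that shape only — the module and function names below are invented for illustration and are not ns_server internals — such a term could be summarised in Erlang as:

    %% Hypothetical helper; only illustrates the {Src, Dst, Opts} term
    %% printed by the "Starting tap stream" entries above.
    -module(tap_args_sketch).
    -export([describe/1]).

    describe({{SrcHost, SrcPort}, {DstHost, DstPort}, Opts}) ->
        Vbs      = proplists:get_value(vbuckets, Opts, []),
        Takeover = proplists:get_bool(takeover, Opts),
        io_lib:format("vbuckets ~w: ~s:~b -> ~s:~b (takeover=~p)",
                      [Vbs, SrcHost, SrcPort, DstHost, DstPort, Takeover]).

For the vbucket 437 entry above this would render "vbuckets [437]: 10.242.238.88:11209 -> 10.242.238.89:11209 (takeover=true)".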
[ns_server:debug,2014-08-19T16:50:36.277,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:36.277,ns_1@10.242.238.88:<0.9115.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9439.1> [ns_server:debug,2014-08-19T16:50:36.277,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.278,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{685, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.278,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.278,ns_1@10.242.238.88:<0.9115.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:36.278,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:36.280,ns_1@10.242.238.88:<0.9115.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.280,ns_1@10.242.238.88:<0.9115.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.281,ns_1@10.242.238.88:<0.6443.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 437 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.283,ns_1@10.242.238.88:<0.6456.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:36.284,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 685 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.284,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 685) [ns_server:debug,2014-08-19T16:50:36.285,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.285,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 938 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.286,ns_1@10.242.238.88:<0.9448.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 938 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:50:36.286,ns_1@10.242.238.88:<0.6456.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_437_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.289,ns_1@10.242.238.88:<0.9116.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_433 [rebalance:info,2014-08-19T16:50:36.291,ns_1@10.242.238.88:<0.9116.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[433]}, {checkpoints,[{433,1}]}, {name,<<"rebalance_433">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[433]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"433"}]} [rebalance:debug,2014-08-19T16:50:36.291,ns_1@10.242.238.88:<0.9116.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9456.1> [rebalance:info,2014-08-19T16:50:36.292,ns_1@10.242.238.88:<0.9116.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.294,ns_1@10.242.238.88:<0.9116.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.294,ns_1@10.242.238.88:<0.9116.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.295,ns_1@10.242.238.88:<0.6766.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 433 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.296,ns_1@10.242.238.88:<0.6774.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.300,ns_1@10.242.238.88:<0.6774.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_433_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.303,ns_1@10.242.238.88:<0.9121.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_690 [rebalance:info,2014-08-19T16:50:36.304,ns_1@10.242.238.88:<0.9121.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[690]}, {checkpoints,[{690,1}]}, {name,<<"rebalance_690">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[690]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"690"}]} [ns_server:debug,2014-08-19T16:50:36.304,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:36.305,ns_1@10.242.238.88:<0.9121.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9470.1> 
[ns_server:debug,2014-08-19T16:50:36.305,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.305,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:36.306,ns_1@10.242.238.88:<0.9121.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:36.306,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.306,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{938, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:36.309,ns_1@10.242.238.88:<0.9121.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.309,ns_1@10.242.238.88:<0.9121.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.310,ns_1@10.242.238.88:<0.6654.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 690 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.312,ns_1@10.242.238.88:<0.6662.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.317,ns_1@10.242.238.88:<0.9126.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_943 [ns_server:info,2014-08-19T16:50:36.318,ns_1@10.242.238.88:<0.6662.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_690_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:36.319,ns_1@10.242.238.88:<0.9126.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[943]}, {checkpoints,[{943,1}]}, {name,<<"rebalance_943">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[943]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"943"}]} [rebalance:debug,2014-08-19T16:50:36.320,ns_1@10.242.238.88:<0.9126.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9479.1> [rebalance:info,2014-08-19T16:50:36.320,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 938 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.320,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 938) [rebalance:info,2014-08-19T16:50:36.321,ns_1@10.242.238.88:<0.9126.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:36.321,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.321,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 692 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.322,ns_1@10.242.238.88:<0.9482.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 692 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:36.322,ns_1@10.242.238.88:<0.9126.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.322,ns_1@10.242.238.88:<0.9126.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.324,ns_1@10.242.238.88:<0.6878.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 943 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.330,ns_1@10.242.238.88:<0.9146.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_947 [ns_server:debug,2014-08-19T16:50:36.331,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 399. Nacking mccouch update. [views:debug,2014-08-19T16:50:36.332,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/399. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.332,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",399,active,0} [ns_server:debug,2014-08-19T16:50:36.334,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,774,719,591,408,280,953,825,642,514,459,148,876,693,565, 510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718, 590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615, 432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457, 146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482, 354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949, 821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713, 585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610, 427,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836, 653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987,859, 676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701,573, 390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726,598, 415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440,312, 985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154,882, 699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698, 1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487,176, 776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226,826, 515,460,694,1005,928,617,306,851,540,485,174] 
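(Illustrative aside, not part of the captured log.) The "Usable vbuckets" dump above is an ordinary Erlang list of vbucket ids, re-logged by capi_set_view_manager each time the set changes; note that 399 now appears in it, matching the set_vbucket event for default/399 logged just before. A minimal, hypothetical check against such a list is plain list membership, e.g. in the Erlang shell:

    %% Hypothetical check against a "Usable vbuckets" list as dumped above.
    IsUsable = fun(VBucket, UsableVBuckets) ->
                   lists:member(VBucket, UsableVBuckets)
               end.
    %% IsUsable(399, Usable) -> true for the list above.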
[rebalance:info,2014-08-19T16:50:36.336,ns_1@10.242.238.88:<0.9146.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[947]}, {checkpoints,[{947,1}]}, {name,<<"rebalance_947">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[947]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"947"}]} [rebalance:debug,2014-08-19T16:50:36.337,ns_1@10.242.238.88:<0.9146.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9483.1> [rebalance:info,2014-08-19T16:50:36.338,ns_1@10.242.238.88:<0.9146.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.339,ns_1@10.242.238.88:<0.9146.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.339,ns_1@10.242.238.88:<0.9146.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.340,ns_1@10.242.238.88:<0.6555.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 947 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:36.343,ns_1@10.242.238.88:<0.6886.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.343,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.343,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.88:<0.6563.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{692, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.344,ns_1@10.242.238.88:<0.9111.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_431 [ns_server:debug,2014-08-19T16:50:36.345,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.346,ns_1@10.242.238.88:<0.9111.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[431]}, {checkpoints,[{431,1}]}, {name,<<"rebalance_431">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[431]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"431"}]} [ns_server:info,2014-08-19T16:50:36.347,ns_1@10.242.238.88:<0.6886.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_943_'ns_1@10.242.238.90'">>] 
[rebalance:debug,2014-08-19T16:50:36.347,ns_1@10.242.238.88:<0.9111.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9495.1> [ns_server:info,2014-08-19T16:50:36.348,ns_1@10.242.238.88:<0.6563.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_947_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:36.348,ns_1@10.242.238.88:<0.9111.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.350,ns_1@10.242.238.88:<0.9111.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.350,ns_1@10.242.238.88:<0.9111.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.351,ns_1@10.242.238.88:<0.6920.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 431 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:36.351,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 692 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.352,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 692) [ns_server:debug,2014-08-19T16:50:36.352,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.352,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 688 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.352,ns_1@10.242.238.88:<0.9499.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 688 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:36.353,ns_1@10.242.238.88:<0.6928.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.357,ns_1@10.242.238.88:<0.6928.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_431_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.361,ns_1@10.242.238.88:<0.9109.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_427 [rebalance:info,2014-08-19T16:50:36.362,ns_1@10.242.238.88:<0.9109.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[427]}, {checkpoints,[{427,1}]}, {name,<<"rebalance_427">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[427]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"427"}]} [rebalance:debug,2014-08-19T16:50:36.363,ns_1@10.242.238.88:<0.9109.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9502.1> [rebalance:info,2014-08-19T16:50:36.364,ns_1@10.242.238.88:<0.9109.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [views:debug,2014-08-19T16:50:36.365,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/399. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.365,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",399,active,0} [rebalance:debug,2014-08-19T16:50:36.366,ns_1@10.242.238.88:<0.9109.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.366,ns_1@10.242.238.88:<0.9109.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:36.367,ns_1@10.242.238.88:<0.7232.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 427 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.367,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.368,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.368,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.368,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.368,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{688, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:36.370,ns_1@10.242.238.88:<0.7240.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:36.372,ns_1@10.242.238.88:<0.9124.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_687 [rebalance:info,2014-08-19T16:50:36.373,ns_1@10.242.238.88:<0.9124.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[687]}, {checkpoints,[{687,1}]}, {name,<<"rebalance_687">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[687]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"687"}]} [rebalance:debug,2014-08-19T16:50:36.375,ns_1@10.242.238.88:<0.9124.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.9518.1> [rebalance:info,2014-08-19T16:50:36.375,ns_1@10.242.238.88:<0.9124.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:36.377,ns_1@10.242.238.88:<0.9124.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:36.377,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 688 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:36.377,ns_1@10.242.238.88:<0.9124.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:36.378,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 688) [rebalance:info,2014-08-19T16:50:36.378,ns_1@10.242.238.88:<0.6899.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 687 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:36.378,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.378,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 951 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.379,ns_1@10.242.238.88:<0.9522.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 951 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:50:36.379,ns_1@10.242.238.88:<0.7240.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_427_'ns_1@10.242.238.91'">>] [rebalance:debug,2014-08-19T16:50:36.380,ns_1@10.242.238.88:<0.6907.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:36.383,ns_1@10.242.238.88:<0.6907.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_687_'ns_1@10.242.238.91'">>] [ns_server:debug,2014-08-19T16:50:36.400,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.400,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.400,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.401,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.401,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{951, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:36.407,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 951 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.408,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 951) [ns_server:debug,2014-08-19T16:50:36.409,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.409,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 940 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.409,ns_1@10.242.238.88:<0.9536.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 940 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.428,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.429,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.429,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.429,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{940, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.429,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.440,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 940 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.440,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 940) [ns_server:debug,2014-08-19T16:50:36.441,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.441,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 946 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.441,ns_1@10.242.238.88:<0.9562.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 946 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.459,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.460,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.460,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{946, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.460,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.460,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.468,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 946 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.469,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 946) [ns_server:debug,2014-08-19T16:50:36.469,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.470,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 435 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.470,ns_1@10.242.238.88:<0.9574.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 435 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:36.487,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.487,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.488,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:36.488,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{435, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.488,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.501,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 435 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.502,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 435) [ns_server:debug,2014-08-19T16:50:36.502,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.503,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 693 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.503,ns_1@10.242.238.88:<0.9585.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 693 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.515,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 397. Nacking mccouch update. [views:debug,2014-08-19T16:50:36.515,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/397. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.515,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",397,active,0} [ns_server:debug,2014-08-19T16:50:36.518,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,774,719,591,408,280,953,825,642,514,459,148,876,693,565, 510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718, 590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615, 432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457, 146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482, 354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949, 821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713, 585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610, 427,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,397,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,212,1017,940,812,757,629,446,318,991,863,680,552,497,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861,678, 550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392, 264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417, 234,962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623,440, 312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465,154, 882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464, 698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803,748, 437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542,487, 176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592,226, 826,515,460,694,1005,928,617,306,851,540,485,174] 
[ns_server:debug,2014-08-19T16:50:36.519,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.519,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.520,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.520,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.521,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.527,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 693 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.528,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 693) [ns_server:debug,2014-08-19T16:50:36.528,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.528,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 941 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.529,ns_1@10.242.238.88:<0.9596.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 941 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.547,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.548,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.548,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
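(Illustrative aside, not part of the captured log.) Each "config change: buckets" record above carries the bucket's settings as an Erlang proplist, with the map key holding per-vbucket triples {VBucket, Chain1, Chain2} — here {693, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}, which, read against the surrounding "Moving vbucket 693 done" records, look like the replication chain before and after the move. As a hedged sketch of that structure only (the module below is hypothetical, not ns_server code), the triples can be pulled out like this:

    %% Hypothetical reader for the {configs,[{BucketName,Props}]} terms
    %% printed by the "config change: buckets" entries above.
    -module(bucket_map_sketch).
    -export([map_updates/1]).

    map_updates({configs, Buckets}) ->
        [{Name, VB, ChainBefore, ChainAfter}
         || {Name, Props} <- Buckets,
            {VB, ChainBefore, ChainAfter} <- proplists:get_value(map, Props, [])].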
[ns_server:debug,2014-08-19T16:50:36.548,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{941, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.548,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:36.549,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/397. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",397,active,0} [rebalance:info,2014-08-19T16:50:36.557,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 941 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.558,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 941) [ns_server:debug,2014-08-19T16:50:36.558,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.559,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 683 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.559,ns_1@10.242.238.88:<0.9607.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 683 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.579,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.579,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.580,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.580,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{683, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.580,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.586,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 683 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.587,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 683) [ns_server:debug,2014-08-19T16:50:36.588,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.588,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 944 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.588,ns_1@10.242.238.88:<0.9618.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 944 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.607,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.608,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.608,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{944, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.608,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.608,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.623,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 944 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.623,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 944) [ns_server:debug,2014-08-19T16:50:36.624,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.624,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 695 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.624,ns_1@10.242.238.88:<0.9644.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 695 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.640,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.640,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.641,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{695, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.642,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.642,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.648,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 695 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.649,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 695) [ns_server:debug,2014-08-19T16:50:36.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 395. Nacking mccouch update. [views:debug,2014-08-19T16:50:36.649,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/395. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",395,active,0} [ns_server:debug,2014-08-19T16:50:36.650,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.650,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 939 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.650,ns_1@10.242.238.88:<0.9654.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 939 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.652,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,774,719,591,408,280,953,825,642,514,459,148,876,693,565, 510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718, 590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615, 432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457, 146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482, 354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949, 821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713, 585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610, 427,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,397,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,210,1015,938,810,755,627,444,316,989,861, 678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575, 392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600, 417,234,962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442,314, 987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884, 701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781, 
726,598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751,623, 440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519, 464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803, 748,437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542, 487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592, 226,826,515,460,694,1005,928,617,306,851,540,485,174] [ns_server:debug,2014-08-19T16:50:36.670,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.671,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.671,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.671,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{939, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.671,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.682,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 939 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.683,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 939) [ns_server:debug,2014-08-19T16:50:36.684,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.684,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 429 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.684,ns_1@10.242.238.88:<0.9665.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 429 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [views:debug,2014-08-19T16:50:36.699,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/395. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.699,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",395,active,0} [ns_server:debug,2014-08-19T16:50:36.702,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:36.702,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{429, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.702,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.703,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.703,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.710,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 429 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.711,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 429) [ns_server:debug,2014-08-19T16:50:36.712,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.712,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 436 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.712,ns_1@10.242.238.88:<0.9676.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 436 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:36.724,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.724,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.724,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{436, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.725,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.726,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.731,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 436 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.732,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 436) [ns_server:debug,2014-08-19T16:50:36.733,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.733,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 689 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.733,ns_1@10.242.238.88:<0.9687.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 689 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.750,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.751,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.751,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.751,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{689, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.751,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.757,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 689 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.758,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 689) [ns_server:debug,2014-08-19T16:50:36.759,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.759,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 694 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.759,ns_1@10.242.238.88:<0.9697.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 694 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.774,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.775,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.775,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{694, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.775,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.775,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.786,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 694 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.787,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 694) [ns_server:debug,2014-08-19T16:50:36.789,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.789,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 430 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.789,ns_1@10.242.238.88:<0.9722.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 430 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:36.802,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.802,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:36.802,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.803,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.803,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{430, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:36.811,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 430 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.812,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.812,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 437 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.812,ns_1@10.242.238.88:<0.9733.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 437 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:36.814,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 430) [ns_server:debug,2014-08-19T16:50:36.824,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 393. Nacking mccouch update. [views:debug,2014-08-19T16:50:36.825,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/393. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.825,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",393,active,0} [ns_server:debug,2014-08-19T16:50:36.828,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:36.828,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{437, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.828,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,774,719,591,408,280,953,825,642,514,459,148,876,693,565, 510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718, 590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615, 432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457, 146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482, 354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949, 821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713, 585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610, 427,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,397,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,208,1013,936,808,753,625,442, 314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156, 884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909, 781,726,598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806,751, 
623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520, 465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830, 519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853, 542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903, 592,226,826,515,460,694,1005,928,617,306,851,540,485,174] [ns_server:debug,2014-08-19T16:50:36.830,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.830,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.830,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.839,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 437 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.840,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 437) [ns_server:debug,2014-08-19T16:50:36.840,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.840,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 433 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.840,ns_1@10.242.238.88:<0.9744.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 433 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:36.855,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.857,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.857,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{433, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.857,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.858,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.865,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 433 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.865,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 433) [ns_server:debug,2014-08-19T16:50:36.866,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.866,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 690 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:36.866,ns_1@10.242.238.88:<0.9755.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 690 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:36.882,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.883,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.883,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.883,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.883,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{690, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:36.890,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 690 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.891,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 690) [ns_server:debug,2014-08-19T16:50:36.892,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.892,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 943 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.892,ns_1@10.242.238.88:<0.9766.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 943 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:50:36.908,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/393. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:36.908,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",393,active,0} [ns_server:debug,2014-08-19T16:50:36.922,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.923,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{943, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.923,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.923,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.924,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.929,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 943 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.930,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 943) [ns_server:debug,2014-08-19T16:50:36.931,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.931,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 947 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:36.931,ns_1@10.242.238.88:<0.9777.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 947 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:36.960,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.961,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{947, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:36.961,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.961,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:36.961,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:36.971,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 947 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:36.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 947) [ns_server:debug,2014-08-19T16:50:36.972,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:36.972,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 431 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:36.972,ns_1@10.242.238.88:<0.9788.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 431 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:36.991,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.991,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:36.992,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.992,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:36.992,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{431, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:36.998,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 431 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:37.000,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 431) [ns_server:debug,2014-08-19T16:50:37.000,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:37.000,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 427 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:37.000,ns_1@10.242.238.88:<0.9807.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 427 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:37.013,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:37.014,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:37.015,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:37.015,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:37.015,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{427, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:37.025,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 427 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:37.025,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 427) [ns_server:debug,2014-08-19T16:50:37.026,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:37.026,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 687 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:37.026,ns_1@10.242.238.88:<0.9823.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 687 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:37.052,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:37.053,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:37.053,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:37.053,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{687, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:37.054,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:37.060,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 687 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:37.061,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 687) [ns_server:debug,2014-08-19T16:50:37.062,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.88'}] [ns_server:debug,2014-08-19T16:50:37.063,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:50:37.064,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:50:37.064,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:50:37.067,ns_1@10.242.238.88:<0.9835.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:50:37.067,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:50:37.067,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.88'} [ns_server:debug,2014-08-19T16:50:37.067,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. Next run will be in 30s [ns_server:debug,2014-08-19T16:50:37.070,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.070,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.9836.1>) [ns_server:debug,2014-08-19T16:50:37.071,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 937) [ns_server:debug,2014-08-19T16:50:37.071,ns_1@10.242.238.88:<0.9837.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:37.071,ns_1@10.242.238.88:<0.9836.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 937 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.071,ns_1@10.242.238.88:<0.9842.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 937 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.071,ns_1@10.242.238.88:<0.9843.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 937 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.075,ns_1@10.242.238.88:<0.9844.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 937 into 'ns_1@10.242.238.89' is <18124.27803.0> [ns_server:debug,2014-08-19T16:50:37.077,ns_1@10.242.238.88:<0.9844.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 937 into 'ns_1@10.242.238.91' is <18126.26540.0> [rebalance:debug,2014-08-19T16:50:37.077,ns_1@10.242.238.88:<0.9836.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 937 is <0.9844.1> [ns_server:debug,2014-08-19T16:50:37.083,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 391. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.083,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/391. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.084,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",391,active,0} [ns_server:debug,2014-08-19T16:50:37.086,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,774,719,591,408,280,953,825,642,514,459,148,876,693,565, 510,382,1004,927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718, 590,407,224,952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615, 432,304,977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457, 146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482, 354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001, 924,796,741,613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949, 821,766,638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713, 585,402,274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610, 427,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635, 452,324,997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477, 166,894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374, 919,791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021, 944,816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969, 
841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,397,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467, 156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364, 909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,206,1011,934,806, 751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648, 520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230, 830,519,464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569, 258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308, 853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358, 903,592,226,826,515,460,694,1005,928,617,306,851,540,485,174] [ns_server:debug,2014-08-19T16:50:37.112,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,103284}, tap_estimate, {replica_building,"default",937,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27803.0>, <<"replication_building_937_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.121,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,112171}, tap_estimate, {replica_building,"default",937,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26540.0>, <<"replication_building_937_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:37.121,ns_1@10.242.238.88:<0.9845.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26540.0>}, {'ns_1@10.242.238.89',<18124.27803.0>}]) [rebalance:info,2014-08-19T16:50:37.122,ns_1@10.242.238.88:<0.9836.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:37.122,ns_1@10.242.238.88:<0.9836.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 937 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.123,ns_1@10.242.238.88:<0.9836.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.123,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.127,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}}] 
[rebalance:debug,2014-08-19T16:50:37.127,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']] (<0.9857.1>) [ns_server:debug,2014-08-19T16:50:37.128,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 682) [ns_server:debug,2014-08-19T16:50:37.128,ns_1@10.242.238.88:<0.9858.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.128,ns_1@10.242.238.88:<0.9858.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:37.128,ns_1@10.242.238.88:<0.9857.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 682 state change [{'ns_1@10.242.238.91',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.128,ns_1@10.242.238.88:<0.9863.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 682 state change: {'ns_1@10.242.238.91',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.128,ns_1@10.242.238.88:<0.9864.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 682 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.132,ns_1@10.242.238.88:<0.9865.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 682 into 'ns_1@10.242.238.91' is <18126.26546.0> [ns_server:debug,2014-08-19T16:50:37.134,ns_1@10.242.238.88:<0.9865.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 682 into 'ns_1@10.242.238.90' is <18125.24093.0> [rebalance:debug,2014-08-19T16:50:37.134,ns_1@10.242.238.88:<0.9857.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 682 is <0.9865.1> [ns_server:debug,2014-08-19T16:50:37.164,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,155275}, tap_estimate, {replica_building,"default",682,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26546.0>, <<"replication_building_682_'ns_1@10.242.238.91'">>} [views:debug,2014-08-19T16:50:37.167,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/391. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.168,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",391,active,0} [ns_server:debug,2014-08-19T16:50:37.179,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,170206}, tap_estimate, {replica_building,"default",682,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24093.0>, <<"replication_building_682_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.179,ns_1@10.242.238.88:<0.9866.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24093.0>}, {'ns_1@10.242.238.91',<18126.26546.0>}]) [rebalance:info,2014-08-19T16:50:37.179,ns_1@10.242.238.88:<0.9857.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:37.180,ns_1@10.242.238.88:<0.9857.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 682 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.181,ns_1@10.242.238.88:<0.9857.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.181,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}} [ns_server:debug,2014-08-19T16:50:37.185,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:37.185,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.9883.1>) [ns_server:debug,2014-08-19T16:50:37.185,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 426) [ns_server:debug,2014-08-19T16:50:37.186,ns_1@10.242.238.88:<0.9884.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.186,ns_1@10.242.238.88:<0.9884.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:37.186,ns_1@10.242.238.88:<0.9883.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 426 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.186,ns_1@10.242.238.88:<0.9889.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 426 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.186,ns_1@10.242.238.88:<0.9890.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 426 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.190,ns_1@10.242.238.88:<0.9891.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 426 into 'ns_1@10.242.238.90' is <18125.24099.0> [ns_server:debug,2014-08-19T16:50:37.191,ns_1@10.242.238.88:<0.9891.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 426 into 'ns_1@10.242.238.89' is <18124.27822.0> [rebalance:debug,2014-08-19T16:50:37.191,ns_1@10.242.238.88:<0.9883.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 426 is <0.9891.1> [ns_server:debug,2014-08-19T16:50:37.221,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,212309}, tap_estimate, {replica_building,"default",426,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24099.0>, <<"replication_building_426_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.234,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,225142}, tap_estimate, {replica_building,"default",426,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27822.0>, <<"replication_building_426_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.234,ns_1@10.242.238.88:<0.9892.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27822.0>}, {'ns_1@10.242.238.90',<18125.24099.0>}]) [rebalance:info,2014-08-19T16:50:37.234,ns_1@10.242.238.88:<0.9883.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:37.235,ns_1@10.242.238.88:<0.9883.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 426 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.235,ns_1@10.242.238.88:<0.9883.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.236,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:37.239,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.239,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.9904.1>) [ns_server:debug,2014-08-19T16:50:37.240,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 936) [ns_server:debug,2014-08-19T16:50:37.240,ns_1@10.242.238.88:<0.9905.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.240,ns_1@10.242.238.88:<0.9905.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:37.240,ns_1@10.242.238.88:<0.9904.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 936 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.240,ns_1@10.242.238.88:<0.9910.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 936 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.240,ns_1@10.242.238.88:<0.9911.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 936 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.244,ns_1@10.242.238.88:<0.9912.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 936 into 'ns_1@10.242.238.89' is <18124.27828.0> [ns_server:debug,2014-08-19T16:50:37.245,ns_1@10.242.238.88:<0.9912.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 936 into 'ns_1@10.242.238.91' is <18126.26565.0> [rebalance:debug,2014-08-19T16:50:37.245,ns_1@10.242.238.88:<0.9904.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 936 is <0.9912.1> [ns_server:debug,2014-08-19T16:50:37.275,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,266207}, tap_estimate, {replica_building,"default",936,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27828.0>, <<"replication_building_936_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.288,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,279404}, tap_estimate, {replica_building,"default",936,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26565.0>, <<"replication_building_936_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:37.289,ns_1@10.242.238.88:<0.9913.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26565.0>}, {'ns_1@10.242.238.89',<18124.27828.0>}]) [rebalance:info,2014-08-19T16:50:37.289,ns_1@10.242.238.88:<0.9904.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:37.289,ns_1@10.242.238.88:<0.9904.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 936 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.290,ns_1@10.242.238.88:<0.9904.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.291,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.294,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:37.294,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.9939.1>) [ns_server:debug,2014-08-19T16:50:37.294,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 681) [ns_server:debug,2014-08-19T16:50:37.295,ns_1@10.242.238.88:<0.9940.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.295,ns_1@10.242.238.88:<0.9940.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:37.295,ns_1@10.242.238.88:<0.9939.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 681 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.295,ns_1@10.242.238.88:<0.9945.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 681 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.295,ns_1@10.242.238.88:<0.9946.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 681 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.299,ns_1@10.242.238.88:<0.9947.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 681 into 'ns_1@10.242.238.89' is <18124.27847.0> [ns_server:debug,2014-08-19T16:50:37.302,ns_1@10.242.238.88:<0.9947.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 681 into 'ns_1@10.242.238.90' is <18125.24118.0> [rebalance:debug,2014-08-19T16:50:37.302,ns_1@10.242.238.88:<0.9939.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 681 is <0.9947.1> [ns_server:debug,2014-08-19T16:50:37.330,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,321561}, tap_estimate, {replica_building,"default",681,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27847.0>, <<"replication_building_681_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.343,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 389. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.343,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/389. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.343,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",389,active,0} [ns_server:debug,2014-08-19T16:50:37.344,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,335633}, tap_estimate, {replica_building,"default",681,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24118.0>, <<"replication_building_681_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.345,ns_1@10.242.238.88:<0.9948.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24118.0>}, {'ns_1@10.242.238.89',<18124.27847.0>}]) [rebalance:info,2014-08-19T16:50:37.345,ns_1@10.242.238.88:<0.9939.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [ns_server:debug,2014-08-19T16:50:37.345,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,953,825,642,514,459,148,876,693,565,510,382,1004, 927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691, 563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771, 716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274, 947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116, 972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997, 869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711, 583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736, 608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761, 633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 
962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806,751,623, 440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519, 464,698,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258,803, 748,437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853,542, 487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903,592, 226,826,515,460,694,1005,928,617,306,851,540,485,174,774,719,408] [rebalance:info,2014-08-19T16:50:37.346,ns_1@10.242.238.88:<0.9939.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 681 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.346,ns_1@10.242.238.88:<0.9939.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.347,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.350,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:37.350,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.9960.1>) [ns_server:debug,2014-08-19T16:50:37.350,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 425) [ns_server:debug,2014-08-19T16:50:37.351,ns_1@10.242.238.88:<0.9961.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.351,ns_1@10.242.238.88:<0.9961.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:37.351,ns_1@10.242.238.88:<0.9960.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 425 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.351,ns_1@10.242.238.88:<0.9966.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 425 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.351,ns_1@10.242.238.88:<0.9967.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 425 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.356,ns_1@10.242.238.88:<0.9968.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 425 into 'ns_1@10.242.238.90' is <18125.24138.0> [ns_server:debug,2014-08-19T16:50:37.358,ns_1@10.242.238.88:<0.9968.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 425 into 'ns_1@10.242.238.89' is <18124.27852.0> [rebalance:debug,2014-08-19T16:50:37.358,ns_1@10.242.238.88:<0.9960.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 425 is <0.9968.1> [ns_server:debug,2014-08-19T16:50:37.388,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,379932}, tap_estimate, {replica_building,"default",425,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24138.0>, <<"replication_building_425_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.401,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,392878}, tap_estimate, {replica_building,"default",425,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27852.0>, <<"replication_building_425_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.402,ns_1@10.242.238.88:<0.9969.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27852.0>}, {'ns_1@10.242.238.90',<18125.24138.0>}]) [rebalance:info,2014-08-19T16:50:37.402,ns_1@10.242.238.88:<0.9960.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:37.403,ns_1@10.242.238.88:<0.9960.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 425 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.403,ns_1@10.242.238.88:<0.9960.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.404,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:37.407,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.407,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.9981.1>) [ns_server:debug,2014-08-19T16:50:37.408,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted 
vbucket move start (vbucket 935) [ns_server:debug,2014-08-19T16:50:37.408,ns_1@10.242.238.88:<0.9982.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.408,ns_1@10.242.238.88:<0.9982.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:37.408,ns_1@10.242.238.88:<0.9981.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 935 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.408,ns_1@10.242.238.88:<0.9987.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 935 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.408,ns_1@10.242.238.88:<0.9988.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 935 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.412,ns_1@10.242.238.88:<0.9989.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 935 into 'ns_1@10.242.238.89' is <18124.27872.0> [ns_server:debug,2014-08-19T16:50:37.415,ns_1@10.242.238.88:<0.9989.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 935 into 'ns_1@10.242.238.91' is <18126.26585.0> [rebalance:debug,2014-08-19T16:50:37.415,ns_1@10.242.238.88:<0.9981.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 935 is <0.9989.1> [views:debug,2014-08-19T16:50:37.426,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/389. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.426,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",389,active,0} [ns_server:debug,2014-08-19T16:50:37.443,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,434564}, tap_estimate, {replica_building,"default",935,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27872.0>, <<"replication_building_935_'ns_1@10.242.238.89'">>} [rebalance:info,2014-08-19T16:50:37.446,ns_1@10.242.238.88:<0.9883.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 426 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:37.446,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 426 state to active [rebalance:info,2014-08-19T16:50:37.447,ns_1@10.242.238.88:<0.9883.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 426 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.447,ns_1@10.242.238.88:<0.9883.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,448244}, tap_estimate, {replica_building,"default",935,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26585.0>, <<"replication_building_935_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:37.457,ns_1@10.242.238.88:<0.9990.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26585.0>}, {'ns_1@10.242.238.89',<18124.27872.0>}]) [rebalance:info,2014-08-19T16:50:37.457,ns_1@10.242.238.88:<0.9981.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:37.458,ns_1@10.242.238.88:<0.9981.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 935 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.458,ns_1@10.242.238.88:<0.9981.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.459,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.462,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.463,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10006.1>) [ns_server:debug,2014-08-19T16:50:37.463,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 680) [ns_server:debug,2014-08-19T16:50:37.463,ns_1@10.242.238.88:<0.10007.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.463,ns_1@10.242.238.88:<0.10007.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:37.463,ns_1@10.242.238.88:<0.10006.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 680 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.464,ns_1@10.242.238.88:<0.10012.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 680 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.464,ns_1@10.242.238.88:<0.10013.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 680 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.469,ns_1@10.242.238.88:<0.10014.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 680 into 'ns_1@10.242.238.89' is <18124.27880.0> [ns_server:debug,2014-08-19T16:50:37.472,ns_1@10.242.238.88:<0.10014.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 680 into 'ns_1@10.242.238.90' is <18125.24152.0> [rebalance:debug,2014-08-19T16:50:37.472,ns_1@10.242.238.88:<0.10006.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 680 is <0.10014.1> [ns_server:debug,2014-08-19T16:50:37.506,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,497863}, tap_estimate, {replica_building,"default",680,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27880.0>, <<"replication_building_680_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.514,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,505254}, tap_estimate, {replica_building,"default",680,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24152.0>, <<"replication_building_680_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.514,ns_1@10.242.238.88:<0.10015.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24152.0>}, {'ns_1@10.242.238.89',<18124.27880.0>}]) [rebalance:info,2014-08-19T16:50:37.515,ns_1@10.242.238.88:<0.10006.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:37.515,ns_1@10.242.238.88:<0.10006.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 680 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.515,ns_1@10.242.238.88:<0.10006.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.516,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.520,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:37.520,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",424, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10041.1>) [ns_server:debug,2014-08-19T16:50:37.520,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 424) [ns_server:debug,2014-08-19T16:50:37.520,ns_1@10.242.238.88:<0.10042.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.521,ns_1@10.242.238.88:<0.10042.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:37.521,ns_1@10.242.238.88:<0.10041.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 424 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.521,ns_1@10.242.238.88:<0.10047.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 424 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.521,ns_1@10.242.238.88:<0.10048.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 424 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.525,ns_1@10.242.238.88:<0.10049.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 424 into 'ns_1@10.242.238.90' is <18125.24158.0> [ns_server:debug,2014-08-19T16:50:37.527,ns_1@10.242.238.88:<0.10049.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 424 into 'ns_1@10.242.238.89' is <18124.27885.0> [rebalance:debug,2014-08-19T16:50:37.527,ns_1@10.242.238.88:<0.10041.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 424 is <0.10049.1> [ns_server:debug,2014-08-19T16:50:37.555,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,546622}, tap_estimate, {replica_building,"default",424,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24158.0>, <<"replication_building_424_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.570,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,561516}, tap_estimate, {replica_building,"default",424,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27885.0>, <<"replication_building_424_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.570,ns_1@10.242.238.88:<0.10050.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27885.0>}, {'ns_1@10.242.238.90',<18125.24158.0>}]) [rebalance:info,2014-08-19T16:50:37.571,ns_1@10.242.238.88:<0.10041.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:37.571,ns_1@10.242.238.88:<0.10041.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 424 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.572,ns_1@10.242.238.88:<0.10041.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.572,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} 
[ns_server:debug,2014-08-19T16:50:37.576,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.576,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10062.1>) [ns_server:debug,2014-08-19T16:50:37.576,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 934) [ns_server:debug,2014-08-19T16:50:37.576,ns_1@10.242.238.88:<0.10063.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.576,ns_1@10.242.238.88:<0.10063.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:37.576,ns_1@10.242.238.88:<0.10062.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 934 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.577,ns_1@10.242.238.88:<0.10068.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 934 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.577,ns_1@10.242.238.88:<0.10069.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 934 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.580,ns_1@10.242.238.88:<0.10070.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 934 into 'ns_1@10.242.238.89' is <18124.27891.0> [ns_server:debug,2014-08-19T16:50:37.583,ns_1@10.242.238.88:<0.10070.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 934 into 'ns_1@10.242.238.91' is <18126.26605.0> [rebalance:debug,2014-08-19T16:50:37.583,ns_1@10.242.238.88:<0.10062.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 934 is <0.10070.1> [ns_server:debug,2014-08-19T16:50:37.593,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 387. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.594,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/387. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.594,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",387,active,0} [ns_server:debug,2014-08-19T16:50:37.596,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,953,825,642,514,459,148,876,693,565,510,382,1004, 927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691, 563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771, 716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274, 947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116, 972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997, 869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711, 583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736, 608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761, 633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806,751,623, 440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519, 464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,437,126,982,671,360,905,594,228,828,517,462,696,1007,930,619,308,853, 542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358,903, 592,226,826,515,460,694,1005,928,617,306,851,540,485,174,774,719,408] 
[ns_server:debug,2014-08-19T16:50:37.611,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,602938}, tap_estimate, {replica_building,"default",934,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27891.0>, <<"replication_building_934_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.625,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,616800}, tap_estimate, {replica_building,"default",934,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26605.0>, <<"replication_building_934_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:37.626,ns_1@10.242.238.88:<0.10071.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26605.0>}, {'ns_1@10.242.238.89',<18124.27891.0>}]) [rebalance:info,2014-08-19T16:50:37.626,ns_1@10.242.238.88:<0.10062.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:37.626,ns_1@10.242.238.88:<0.10062.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 934 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.627,ns_1@10.242.238.88:<0.10062.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.627,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.631,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.631,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10083.1>) [ns_server:debug,2014-08-19T16:50:37.631,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 679) [ns_server:debug,2014-08-19T16:50:37.631,ns_1@10.242.238.88:<0.10084.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.632,ns_1@10.242.238.88:<0.10084.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:37.632,ns_1@10.242.238.88:<0.10083.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 679 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.632,ns_1@10.242.238.88:<0.10089.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 679 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.632,ns_1@10.242.238.88:<0.10090.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 679 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.636,ns_1@10.242.238.88:<0.10091.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 679 into 'ns_1@10.242.238.89' is <18124.27896.0> [ns_server:debug,2014-08-19T16:50:37.638,ns_1@10.242.238.88:<0.10091.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 679 into 'ns_1@10.242.238.90' is <18125.24177.0> [rebalance:debug,2014-08-19T16:50:37.638,ns_1@10.242.238.88:<0.10083.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 679 is <0.10091.1> [views:debug,2014-08-19T16:50:37.660,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/387. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",387,active,0} [ns_server:debug,2014-08-19T16:50:37.668,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,659510}, tap_estimate, {replica_building,"default",679,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27896.0>, <<"replication_building_679_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.688,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,679733}, tap_estimate, {replica_building,"default",679,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24177.0>, <<"replication_building_679_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.689,ns_1@10.242.238.88:<0.10092.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24177.0>}, {'ns_1@10.242.238.89',<18124.27896.0>}]) [rebalance:info,2014-08-19T16:50:37.689,ns_1@10.242.238.88:<0.10083.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:37.690,ns_1@10.242.238.88:<0.10083.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 679 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.690,ns_1@10.242.238.88:<0.10083.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.691,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.695,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:37.695,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10104.1>) [ns_server:debug,2014-08-19T16:50:37.695,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 423) [ns_server:debug,2014-08-19T16:50:37.695,ns_1@10.242.238.88:<0.10105.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.695,ns_1@10.242.238.88:<0.10105.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:37.696,ns_1@10.242.238.88:<0.10104.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 423 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.696,ns_1@10.242.238.88:<0.10110.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 423 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.696,ns_1@10.242.238.88:<0.10111.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 423 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.700,ns_1@10.242.238.88:<0.10112.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 423 into 'ns_1@10.242.238.90' is <18125.24183.0> [ns_server:debug,2014-08-19T16:50:37.703,ns_1@10.242.238.88:<0.10112.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 423 into 'ns_1@10.242.238.89' is <18124.27915.0> [rebalance:debug,2014-08-19T16:50:37.703,ns_1@10.242.238.88:<0.10104.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 423 is <0.10112.1> [ns_server:debug,2014-08-19T16:50:37.731,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,722604}, tap_estimate, {replica_building,"default",423,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24183.0>, <<"replication_building_423_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.745,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,736054}, tap_estimate, {replica_building,"default",423,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27915.0>, <<"replication_building_423_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.745,ns_1@10.242.238.88:<0.10113.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27915.0>}, {'ns_1@10.242.238.90',<18125.24183.0>}]) [rebalance:info,2014-08-19T16:50:37.745,ns_1@10.242.238.88:<0.10104.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:37.746,ns_1@10.242.238.88:<0.10104.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 423 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.746,ns_1@10.242.238.88:<0.10104.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:37.747,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:37.750,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.750,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10139.1>) [ns_server:debug,2014-08-19T16:50:37.750,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 933) [ns_server:debug,2014-08-19T16:50:37.751,ns_1@10.242.238.88:<0.10140.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.751,ns_1@10.242.238.88:<0.10140.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:37.751,ns_1@10.242.238.88:<0.10139.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 933 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.751,ns_1@10.242.238.88:<0.10145.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 933 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.751,ns_1@10.242.238.88:<0.10146.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 933 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.755,ns_1@10.242.238.88:<0.10147.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 933 into 'ns_1@10.242.238.89' is <18124.27921.0> [ns_server:debug,2014-08-19T16:50:37.758,ns_1@10.242.238.88:<0.10147.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 933 into 'ns_1@10.242.238.91' is <18126.26625.0> [rebalance:debug,2014-08-19T16:50:37.758,ns_1@10.242.238.88:<0.10139.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 933 is <0.10147.1> [ns_server:debug,2014-08-19T16:50:37.788,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,779519}, tap_estimate, {replica_building,"default",933,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27921.0>, <<"replication_building_933_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.791,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 385. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.791,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/385. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.792,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",385,active,0} [ns_server:debug,2014-08-19T16:50:37.794,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,953,825,642,514,459,148,876,693,565,510,382,1004, 927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691, 563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771, 716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274, 947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116, 972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997, 869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711, 583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736, 608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761, 633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806,751,623, 440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519, 464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308, 853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358, 903,592,226,826,515,460,694,1005,928,617,306,851,540,485,174,774,719,408] 
[ns_server:debug,2014-08-19T16:50:37.800,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,791600}, tap_estimate, {replica_building,"default",933,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26625.0>, <<"replication_building_933_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:37.801,ns_1@10.242.238.88:<0.10148.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26625.0>}, {'ns_1@10.242.238.89',<18124.27921.0>}]) [rebalance:info,2014-08-19T16:50:37.801,ns_1@10.242.238.88:<0.10139.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:37.801,ns_1@10.242.238.88:<0.10139.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 933 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.802,ns_1@10.242.238.88:<0.10139.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.802,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.806,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.806,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10160.1>) [ns_server:debug,2014-08-19T16:50:37.806,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 678) [ns_server:debug,2014-08-19T16:50:37.807,ns_1@10.242.238.88:<0.10161.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.807,ns_1@10.242.238.88:<0.10161.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:37.807,ns_1@10.242.238.88:<0.10160.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 678 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.807,ns_1@10.242.238.88:<0.10166.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 678 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.807,ns_1@10.242.238.88:<0.10167.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 678 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.811,ns_1@10.242.238.88:<0.10168.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 678 into 'ns_1@10.242.238.89' is <18124.27940.0> [ns_server:debug,2014-08-19T16:50:37.813,ns_1@10.242.238.88:<0.10168.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 678 into 'ns_1@10.242.238.90' is <18125.24203.0> [rebalance:debug,2014-08-19T16:50:37.813,ns_1@10.242.238.88:<0.10160.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 678 is <0.10168.1> [views:debug,2014-08-19T16:50:37.825,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/385. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.825,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",385,active,0} [ns_server:debug,2014-08-19T16:50:37.842,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,833631}, tap_estimate, {replica_building,"default",678,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27940.0>, <<"replication_building_678_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.856,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,847858}, tap_estimate, {replica_building,"default",678,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24203.0>, <<"replication_building_678_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.857,ns_1@10.242.238.88:<0.10169.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24203.0>}, {'ns_1@10.242.238.89',<18124.27940.0>}]) [rebalance:info,2014-08-19T16:50:37.857,ns_1@10.242.238.88:<0.10160.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:37.858,ns_1@10.242.238.88:<0.10160.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 678 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.858,ns_1@10.242.238.88:<0.10160.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.859,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.862,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:37.862,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10186.1>) [ns_server:debug,2014-08-19T16:50:37.863,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 422) [ns_server:debug,2014-08-19T16:50:37.863,ns_1@10.242.238.88:<0.10187.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.863,ns_1@10.242.238.88:<0.10187.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:37.863,ns_1@10.242.238.88:<0.10186.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 422 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.864,ns_1@10.242.238.88:<0.10193.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 422 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.864,ns_1@10.242.238.88:<0.10194.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 422 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.868,ns_1@10.242.238.88:<0.10197.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 422 into 'ns_1@10.242.238.90' is <18125.24209.0> [ns_server:debug,2014-08-19T16:50:37.870,ns_1@10.242.238.88:<0.10197.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 422 into 'ns_1@10.242.238.89' is <18124.27951.0> [rebalance:debug,2014-08-19T16:50:37.870,ns_1@10.242.238.88:<0.10186.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 422 is <0.10197.1> [ns_server:debug,2014-08-19T16:50:37.898,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,889865}, tap_estimate, {replica_building,"default",422,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24209.0>, <<"replication_building_422_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:37.900,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 383. Nacking mccouch update. [views:debug,2014-08-19T16:50:37.900,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/383. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.901,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",383,active,0} [ns_server:debug,2014-08-19T16:50:37.902,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,953,825,642,514,459,148,876,693,565,510,382,1004, 927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224, 952,824,641,513,458,330,875,692,564,509,198,1003,926,798,743,615,432,304,977, 849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691, 563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771, 716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274, 947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116, 972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997, 869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711, 583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736, 608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761, 633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806,751,623, 440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519, 464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308, 853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358, 903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408] 
[ns_server:debug,2014-08-19T16:50:37.912,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,903281}, tap_estimate, {replica_building,"default",422,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27951.0>, <<"replication_building_422_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.912,ns_1@10.242.238.88:<0.10204.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27951.0>}, {'ns_1@10.242.238.90',<18125.24209.0>}]) [rebalance:info,2014-08-19T16:50:37.912,ns_1@10.242.238.88:<0.10186.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:37.913,ns_1@10.242.238.88:<0.10186.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 422 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.913,ns_1@10.242.238.88:<0.10186.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.914,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:37.917,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.917,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10216.1>) [ns_server:debug,2014-08-19T16:50:37.918,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 932) [ns_server:debug,2014-08-19T16:50:37.918,ns_1@10.242.238.88:<0.10217.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.918,ns_1@10.242.238.88:<0.10217.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:37.918,ns_1@10.242.238.88:<0.10216.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 932 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.918,ns_1@10.242.238.88:<0.10222.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 932 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.918,ns_1@10.242.238.88:<0.10223.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 932 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.922,ns_1@10.242.238.88:<0.10224.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 932 into 'ns_1@10.242.238.89' is <18124.27957.0> [ns_server:debug,2014-08-19T16:50:37.925,ns_1@10.242.238.88:<0.10224.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 932 into 'ns_1@10.242.238.91' is <18126.26645.0> [rebalance:debug,2014-08-19T16:50:37.925,ns_1@10.242.238.88:<0.10216.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 932 is <0.10224.1> [views:debug,2014-08-19T16:50:37.934,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/383. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:37.935,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",383,active,0} [ns_server:debug,2014-08-19T16:50:37.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}]}, {move_state,422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_422_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_422_'ns_1@10.242.238.90'">>}]}, {move_state,678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_678_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_678_'ns_1@10.242.238.89'">>}]}, {move_state,933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_933_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_933_'ns_1@10.242.238.89'">>}]}, {move_state,423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_423_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_423_'ns_1@10.242.238.90'">>}]}, {move_state,679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_679_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_679_'ns_1@10.242.238.89'">>}]}, {move_state,934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_934_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_934_'ns_1@10.242.238.89'">>}]}, {move_state,424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_424_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_424_'ns_1@10.242.238.90'">>}]}, {move_state,680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_680_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_680_'ns_1@10.242.238.89'">>}]}, {move_state,935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_935_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_935_'ns_1@10.242.238.89'">>}]}, {move_state,425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_425_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_425_'ns_1@10.242.238.90'">>}]}, {move_state,681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_681_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_681_'ns_1@10.242.238.89'">>}]}, {move_state,936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_936_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_936_'ns_1@10.242.238.89'">>}]}, {move_state,426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_426_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_426_'ns_1@10.242.238.90'">>}]}, {move_state,682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_682_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_682_'ns_1@10.242.238.91'">>}]}, {move_state,937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_937_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_937_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:50:37.952,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 422, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:37.952,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 678, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:37.953,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 933, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.953,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,944561}, tap_estimate, {replica_building,"default",932,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27957.0>, <<"replication_building_932_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:37.953,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 423, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:37.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 679, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 934, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 424, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:37.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 680, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 935, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 425, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:37.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 681, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 936, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 426, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:37.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 682, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] [ns_server:debug,2014-08-19T16:50:37.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 937, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:37.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452637,959608}, tap_estimate, {replica_building,"default",932,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26645.0>, <<"replication_building_932_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:37.969,ns_1@10.242.238.88:<0.10225.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26645.0>}, {'ns_1@10.242.238.89',<18124.27957.0>}]) [rebalance:info,2014-08-19T16:50:37.969,ns_1@10.242.238.88:<0.10216.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 
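Editor's note: every "Seeing tap_estimate" entry carries the same 6-tuple. A hedged pattern match that pulls out the interesting fields, with the layout read off the dumps above ({Timestamp, tap_estimate, {replica_building, Bucket, VBucket, SrcNode, DstNode}, Estimate, BuilderPid, TapName}); an estimate of 0 means nothing is left to backfill for that replica:

%% Hedged sketch: destructuring the tap_estimate events logged above.
log_tap_estimate({_Timestamp, tap_estimate,
                  {replica_building, Bucket, VBucket, Src, Dst},
                  Estimate, _BuilderPid, TapName}) ->
    io:format("~s: vb ~b ~w -> ~w, ~b docs left (~s)~n",
              [Bucket, VBucket, Src, Dst, Estimate, TapName]).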
[rebalance:info,2014-08-19T16:50:37.969,ns_1@10.242.238.88:<0.10216.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 932 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:37.970,ns_1@10.242.238.88:<0.10216.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:37.970,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:37.974,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:37.974,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10261.1>) [ns_server:debug,2014-08-19T16:50:37.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 677) [ns_server:debug,2014-08-19T16:50:37.975,ns_1@10.242.238.88:<0.10262.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:37.975,ns_1@10.242.238.88:<0.10262.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:37.975,ns_1@10.242.238.88:<0.10261.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 677 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:37.975,ns_1@10.242.238.88:<0.10267.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 677 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:37.975,ns_1@10.242.238.88:<0.10268.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 677 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:37.979,ns_1@10.242.238.88:<0.10269.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 677 into 'ns_1@10.242.238.89' is <18124.27976.0> [ns_server:debug,2014-08-19T16:50:37.981,ns_1@10.242.238.88:<0.10269.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 677 into 'ns_1@10.242.238.90' is <18125.24228.0> [rebalance:debug,2014-08-19T16:50:37.981,ns_1@10.242.238.88:<0.10261.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 677 is <0.10269.1> [ns_server:debug,2014-08-19T16:50:38.009,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,517}, tap_estimate, {replica_building,"default",677,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27976.0>, <<"replication_building_677_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.010,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 381. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:38.010,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/381. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.010,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",381,active,0} [ns_server:debug,2014-08-19T16:50:38.012,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,953,825,642,514,459,148,876,693,565,510,382,1004, 927,799,744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224, 952,824,641,513,458,330,875,692,564,509,381,198,1003,926,798,743,615,432,304, 977,849,666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874, 691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899, 771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,196,1001,924,796, 741,613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766, 638,455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663, 535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560, 505,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402, 274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244, 116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324, 997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477,166,894, 711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791, 736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816, 761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841,658, 530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836, 653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678, 550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392, 264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417, 234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314, 987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884, 701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781, 726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806,751, 623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520, 465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830, 519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569, 
258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619, 308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669, 358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408] [ns_server:debug,2014-08-19T16:50:38.024,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,15464}, tap_estimate, {replica_building,"default",677,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24228.0>, <<"replication_building_677_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.025,ns_1@10.242.238.88:<0.10276.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24228.0>}, {'ns_1@10.242.238.89',<18124.27976.0>}]) [rebalance:info,2014-08-19T16:50:38.025,ns_1@10.242.238.88:<0.10261.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:38.026,ns_1@10.242.238.88:<0.10261.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 677 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.026,ns_1@10.242.238.88:<0.10261.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.027,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.030,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:38.030,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10288.1>) [ns_server:debug,2014-08-19T16:50:38.030,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 421) [ns_server:debug,2014-08-19T16:50:38.031,ns_1@10.242.238.88:<0.10289.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.031,ns_1@10.242.238.88:<0.10289.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:38.031,ns_1@10.242.238.88:<0.10288.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 421 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.031,ns_1@10.242.238.88:<0.10294.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 421 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.031,ns_1@10.242.238.88:<0.10295.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 421 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.035,ns_1@10.242.238.88:<0.10296.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 421 into 'ns_1@10.242.238.90' is <18125.24234.0> [ns_server:debug,2014-08-19T16:50:38.037,ns_1@10.242.238.88:<0.10296.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 421 into 'ns_1@10.242.238.89' is <18124.27981.0> [rebalance:debug,2014-08-19T16:50:38.037,ns_1@10.242.238.88:<0.10288.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 421 is <0.10296.1> [views:debug,2014-08-19T16:50:38.043,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/381. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.044,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",381,active,0} [ns_server:debug,2014-08-19T16:50:38.066,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,57710}, tap_estimate, {replica_building,"default",421,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24234.0>, <<"replication_building_421_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.078,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,69961}, tap_estimate, {replica_building,"default",421,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27981.0>, <<"replication_building_421_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.079,ns_1@10.242.238.88:<0.10297.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.27981.0>}, {'ns_1@10.242.238.90',<18125.24234.0>}]) [rebalance:info,2014-08-19T16:50:38.079,ns_1@10.242.238.88:<0.10288.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:38.080,ns_1@10.242.238.88:<0.10288.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 421 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.080,ns_1@10.242.238.88:<0.10288.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.081,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:38.084,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:38.084,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10309.1>) [ns_server:debug,2014-08-19T16:50:38.084,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 931) [ns_server:debug,2014-08-19T16:50:38.085,ns_1@10.242.238.88:<0.10310.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.085,ns_1@10.242.238.88:<0.10310.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:38.085,ns_1@10.242.238.88:<0.10309.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 931 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.085,ns_1@10.242.238.88:<0.10315.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 931 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.085,ns_1@10.242.238.88:<0.10316.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 931 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.089,ns_1@10.242.238.88:<0.10317.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 931 into 'ns_1@10.242.238.89' is <18124.27987.0> [ns_server:debug,2014-08-19T16:50:38.091,ns_1@10.242.238.88:<0.10317.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 931 into 'ns_1@10.242.238.91' is <18126.26665.0> [rebalance:debug,2014-08-19T16:50:38.091,ns_1@10.242.238.88:<0.10309.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 931 is <0.10317.1> [ns_server:debug,2014-08-19T16:50:38.119,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,110723}, tap_estimate, {replica_building,"default",931,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27987.0>, <<"replication_building_931_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.133,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,124056}, tap_estimate, {replica_building,"default",931,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26665.0>, <<"replication_building_931_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:38.133,ns_1@10.242.238.88:<0.10318.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26665.0>}, {'ns_1@10.242.238.89',<18124.27987.0>}]) [rebalance:info,2014-08-19T16:50:38.133,ns_1@10.242.238.88:<0.10309.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:38.134,ns_1@10.242.238.88:<0.10309.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 931 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.134,ns_1@10.242.238.88:<0.10309.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
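Editor's note: "Will wait for checkpoint 1 on replicas" means the mover does not proceed until every destination has persisted at least the checkpoint id captured on the source node. A hedged polling sketch of that wait; persisted_checkpoint_id/2 is an illustrative stub, not the janitor_agent call the real code uses:

%% Hedged sketch of the checkpoint-persistence wait.
wait_checkpoint_persisted(_VBucket, _Replicas, _WantedId, 0) ->
    {error, timeout};
wait_checkpoint_persisted(VBucket, Replicas, WantedId, Retries) ->
    Persisted = [persisted_checkpoint_id(Node, VBucket) || Node <- Replicas],
    case lists:all(fun(Id) -> Id >= WantedId end, Persisted) of
        true  -> ok;
        false -> timer:sleep(100),
                 wait_checkpoint_persisted(VBucket, Replicas, WantedId, Retries - 1)
    end.

persisted_checkpoint_id(_Node, _VBucket) -> 1.   %% stub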
[ns_server:debug,2014-08-19T16:50:38.134,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.138,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.138,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10344.1>) [ns_server:debug,2014-08-19T16:50:38.138,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 676) [ns_server:debug,2014-08-19T16:50:38.139,ns_1@10.242.238.88:<0.10345.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.139,ns_1@10.242.238.88:<0.10345.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:38.139,ns_1@10.242.238.88:<0.10344.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 676 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.139,ns_1@10.242.238.88:<0.10350.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 676 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.139,ns_1@10.242.238.88:<0.10351.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 676 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.143,ns_1@10.242.238.88:<0.10352.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 676 into 'ns_1@10.242.238.89' is <18124.27992.0> [ns_server:debug,2014-08-19T16:50:38.146,ns_1@10.242.238.88:<0.10352.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 676 into 'ns_1@10.242.238.90' is <18125.24239.0> [rebalance:debug,2014-08-19T16:50:38.146,ns_1@10.242.238.88:<0.10344.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 676 is <0.10352.1> [ns_server:debug,2014-08-19T16:50:38.160,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 379. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.160,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/379. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.161,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",379,active,0} [ns_server:debug,2014-08-19T16:50:38.163,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,876,693,565,510,382,1004,927,799, 744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824, 641,513,458,330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849, 666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563, 508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274, 947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116, 972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997, 869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711, 583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736, 608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761, 633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914,786, 731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678,550, 495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392,264, 1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417,234, 962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314,987, 859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884,701, 573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781,726, 598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806,751,623, 440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520,465, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519, 464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569,258, 803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308, 853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669,358, 903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408, 953,642] 
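Editor's note: each "Usable vbuckets" dump from capi_set_view_manager is a flat, unsorted list of vbucket ids (0..1023); this one includes 379, which was absent from the previous dump, matching the set_vbucket event for default/379 a few entries earlier. A small hedged helper for sanity-checking such a dump:

%% Hedged helper: summarize a "Usable vbuckets" dump like the one above.
summarize_usable(Usable) ->
    {length(Usable), lists:min(Usable), lists:max(Usable),
     lists:member(379, Usable)}.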
[ns_server:debug,2014-08-19T16:50:38.175,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,166339}, tap_estimate, {replica_building,"default",676,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.27992.0>, <<"replication_building_676_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.193,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,184542}, tap_estimate, {replica_building,"default",676,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24239.0>, <<"replication_building_676_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.193,ns_1@10.242.238.88:<0.10353.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24239.0>}, {'ns_1@10.242.238.89',<18124.27992.0>}]) [rebalance:info,2014-08-19T16:50:38.194,ns_1@10.242.238.88:<0.10344.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:38.194,ns_1@10.242.238.88:<0.10344.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 676 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.195,ns_1@10.242.238.88:<0.10344.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.195,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.199,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:38.199,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10365.1>) [ns_server:debug,2014-08-19T16:50:38.199,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 420) [ns_server:debug,2014-08-19T16:50:38.200,ns_1@10.242.238.88:<0.10366.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.200,ns_1@10.242.238.88:<0.10366.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:38.200,ns_1@10.242.238.88:<0.10365.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 420 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.200,ns_1@10.242.238.88:<0.10371.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 420 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.200,ns_1@10.242.238.88:<0.10372.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 420 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.205,ns_1@10.242.238.88:<0.10373.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 420 into 'ns_1@10.242.238.90' is <18125.24259.0> [ns_server:debug,2014-08-19T16:50:38.208,ns_1@10.242.238.88:<0.10373.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 420 into 'ns_1@10.242.238.89' is <18124.28011.0> [rebalance:debug,2014-08-19T16:50:38.208,ns_1@10.242.238.88:<0.10365.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 420 is <0.10373.1> [rebalance:info,2014-08-19T16:50:38.220,ns_1@10.242.238.88:<0.9836.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 937 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:38.220,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 937 state to active [rebalance:info,2014-08-19T16:50:38.221,ns_1@10.242.238.88:<0.9836.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 937 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.221,ns_1@10.242.238.88:<0.9836.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:38.236,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/379. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.236,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",379,active,0} [ns_server:debug,2014-08-19T16:50:38.237,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,228443}, tap_estimate, {replica_building,"default",420,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24259.0>, <<"replication_building_420_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.257,ns_1@10.242.238.88:<0.10374.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28011.0>}, {'ns_1@10.242.238.90',<18125.24259.0>}]) [rebalance:info,2014-08-19T16:50:38.257,ns_1@10.242.238.88:<0.10365.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:38.257,ns_1@10.242.238.88:<0.10365.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 420 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:50:38.257,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,247366}, tap_estimate, {replica_building,"default",420,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28011.0>, <<"replication_building_420_'ns_1@10.242.238.89'">>} [rebalance:info,2014-08-19T16:50:38.258,ns_1@10.242.238.88:<0.10365.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.258,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:38.262,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.262,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10390.1>) [ns_server:debug,2014-08-19T16:50:38.262,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 930) [ns_server:debug,2014-08-19T16:50:38.262,ns_1@10.242.238.88:<0.10391.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.263,ns_1@10.242.238.88:<0.10391.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:38.263,ns_1@10.242.238.88:<0.10390.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 930 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.263,ns_1@10.242.238.88:<0.10396.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 930 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.263,ns_1@10.242.238.88:<0.10397.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 930 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.266,ns_1@10.242.238.88:<0.10398.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 930 into 'ns_1@10.242.238.89' is <18124.28020.0> [ns_server:debug,2014-08-19T16:50:38.269,ns_1@10.242.238.88:<0.10398.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 930 into 'ns_1@10.242.238.91' is <18126.26688.0> [rebalance:debug,2014-08-19T16:50:38.269,ns_1@10.242.238.88:<0.10390.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 930 is <0.10398.1> [ns_server:debug,2014-08-19T16:50:38.298,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,289223}, tap_estimate, {replica_building,"default",930,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28020.0>, <<"replication_building_930_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.313,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,304380}, tap_estimate, {replica_building,"default",930,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26688.0>, <<"replication_building_930_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:38.313,ns_1@10.242.238.88:<0.10399.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26688.0>}, {'ns_1@10.242.238.89',<18124.28020.0>}]) [rebalance:info,2014-08-19T16:50:38.314,ns_1@10.242.238.88:<0.10390.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:38.314,ns_1@10.242.238.88:<0.10390.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 930 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.315,ns_1@10.242.238.88:<0.10390.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.315,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.319,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.319,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10425.1>) 
[ns_server:debug,2014-08-19T16:50:38.319,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 675) [ns_server:debug,2014-08-19T16:50:38.319,ns_1@10.242.238.88:<0.10426.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.320,ns_1@10.242.238.88:<0.10426.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:38.320,ns_1@10.242.238.88:<0.10425.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 675 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.320,ns_1@10.242.238.88:<0.10431.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 675 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.320,ns_1@10.242.238.88:<0.10432.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 675 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.324,ns_1@10.242.238.88:<0.10433.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 675 into 'ns_1@10.242.238.89' is <18124.28025.0> [ns_server:debug,2014-08-19T16:50:38.327,ns_1@10.242.238.88:<0.10433.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 675 into 'ns_1@10.242.238.90' is <18125.24264.0> [rebalance:debug,2014-08-19T16:50:38.327,ns_1@10.242.238.88:<0.10425.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 675 is <0.10433.1> [ns_server:debug,2014-08-19T16:50:38.336,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 377. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.336,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/377. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.336,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",377,active,0} [ns_server:debug,2014-08-19T16:50:38.338,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,876,693,565,510,382,1004,927,799, 744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824, 641,513,458,330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849, 666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563, 508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 377,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402, 274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244, 116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324, 997,869,686,558,503,192,920,792,737,609,426,298,971,843,660,532,477,166,894, 711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791, 736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816, 761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841,658, 530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836, 653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678, 550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392, 264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417, 234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314, 987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156,884, 701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909,781, 726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806,751, 623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648,520, 465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230,830, 519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880,569, 258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619, 308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980,669, 358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642] 
[ns_server:debug,2014-08-19T16:50:38.355,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,346040}, tap_estimate, {replica_building,"default",675,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28025.0>, <<"replication_building_675_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.371,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,362688}, tap_estimate, {replica_building,"default",675,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24264.0>, <<"replication_building_675_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.372,ns_1@10.242.238.88:<0.10434.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24264.0>}, {'ns_1@10.242.238.89',<18124.28025.0>}]) [rebalance:info,2014-08-19T16:50:38.372,ns_1@10.242.238.88:<0.10425.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:38.373,ns_1@10.242.238.88:<0.10425.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 675 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.373,ns_1@10.242.238.88:<0.10425.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.374,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.377,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:38.377,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10446.1>) [ns_server:debug,2014-08-19T16:50:38.377,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 419) [ns_server:debug,2014-08-19T16:50:38.378,ns_1@10.242.238.88:<0.10447.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.378,ns_1@10.242.238.88:<0.10447.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:38.378,ns_1@10.242.238.88:<0.10446.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 419 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.378,ns_1@10.242.238.88:<0.10452.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 419 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.378,ns_1@10.242.238.88:<0.10453.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 419 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.382,ns_1@10.242.238.88:<0.10454.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 419 into 'ns_1@10.242.238.90' is <18125.24284.0> [ns_server:debug,2014-08-19T16:50:38.385,ns_1@10.242.238.88:<0.10454.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 419 into 'ns_1@10.242.238.89' is <18124.28044.0> [rebalance:debug,2014-08-19T16:50:38.385,ns_1@10.242.238.88:<0.10446.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 419 is <0.10454.1> [views:debug,2014-08-19T16:50:38.403,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/377. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.403,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",377,active,0} [ns_server:debug,2014-08-19T16:50:38.414,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,404996}, tap_estimate, {replica_building,"default",419,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24284.0>, <<"replication_building_419_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.427,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,418239}, tap_estimate, {replica_building,"default",419,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28044.0>, <<"replication_building_419_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.427,ns_1@10.242.238.88:<0.10455.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28044.0>}, {'ns_1@10.242.238.90',<18125.24284.0>}]) [rebalance:info,2014-08-19T16:50:38.428,ns_1@10.242.238.88:<0.10446.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:38.428,ns_1@10.242.238.88:<0.10446.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 419 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.429,ns_1@10.242.238.88:<0.10446.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.429,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:38.433,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:38.433,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10468.1>) [ns_server:debug,2014-08-19T16:50:38.433,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 929) [ns_server:debug,2014-08-19T16:50:38.433,ns_1@10.242.238.88:<0.10469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.434,ns_1@10.242.238.88:<0.10469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:38.434,ns_1@10.242.238.88:<0.10468.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 929 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.434,ns_1@10.242.238.88:<0.10474.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 929 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.434,ns_1@10.242.238.88:<0.10475.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 929 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.440,ns_1@10.242.238.88:<0.10476.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 929 into 'ns_1@10.242.238.89' is <18124.28050.0> [ns_server:debug,2014-08-19T16:50:38.441,ns_1@10.242.238.88:<0.10476.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 929 into 'ns_1@10.242.238.91' is <18126.26708.0> [rebalance:debug,2014-08-19T16:50:38.441,ns_1@10.242.238.88:<0.10468.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 929 is <0.10476.1> [ns_server:debug,2014-08-19T16:50:38.471,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,462217}, tap_estimate, {replica_building,"default",929,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28050.0>, <<"replication_building_929_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.484,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,475073}, tap_estimate, {replica_building,"default",929,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26708.0>, <<"replication_building_929_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:38.484,ns_1@10.242.238.88:<0.10477.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26708.0>}, {'ns_1@10.242.238.89',<18124.28050.0>}]) [rebalance:info,2014-08-19T16:50:38.484,ns_1@10.242.238.88:<0.10468.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:38.485,ns_1@10.242.238.88:<0.10468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 929 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.485,ns_1@10.242.238.88:<0.10468.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:38.486,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.489,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.490,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10503.1>) [ns_server:debug,2014-08-19T16:50:38.490,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 674) [ns_server:debug,2014-08-19T16:50:38.490,ns_1@10.242.238.88:<0.10504.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.491,ns_1@10.242.238.88:<0.10504.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:38.491,ns_1@10.242.238.88:<0.10503.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 674 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.491,ns_1@10.242.238.88:<0.10509.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 674 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.491,ns_1@10.242.238.88:<0.10510.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 674 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.495,ns_1@10.242.238.88:<0.10511.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 674 into 'ns_1@10.242.238.89' is <18124.28055.0> [ns_server:debug,2014-08-19T16:50:38.498,ns_1@10.242.238.88:<0.10511.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 674 into 'ns_1@10.242.238.90' is <18125.24311.0> [rebalance:debug,2014-08-19T16:50:38.498,ns_1@10.242.238.88:<0.10503.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 674 is <0.10511.1> [ns_server:debug,2014-08-19T16:50:38.525,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,516914}, tap_estimate, {replica_building,"default",674,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28055.0>, <<"replication_building_674_'ns_1@10.242.238.89'">>} [rebalance:info,2014-08-19T16:50:38.539,ns_1@10.242.238.88:<0.9857.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 682 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:38.539,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 682 state to active [ns_server:debug,2014-08-19T16:50:38.540,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,531180}, tap_estimate, {replica_building,"default",674,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24311.0>, <<"replication_building_674_'ns_1@10.242.238.90'">>} 
[ns_server:debug,2014-08-19T16:50:38.540,ns_1@10.242.238.88:<0.10512.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24311.0>}, {'ns_1@10.242.238.89',<18124.28055.0>}]) [rebalance:info,2014-08-19T16:50:38.540,ns_1@10.242.238.88:<0.10503.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:38.541,ns_1@10.242.238.88:<0.9857.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 682 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.541,ns_1@10.242.238.88:<0.9857.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:38.541,ns_1@10.242.238.88:<0.10503.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 674 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.542,ns_1@10.242.238.88:<0.10503.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.542,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.545,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 375. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.545,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/375. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.545,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",375,active,0} [ns_server:debug,2014-08-19T16:50:38.545,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:38.546,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10528.1>) [ns_server:debug,2014-08-19T16:50:38.546,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 418) [ns_server:debug,2014-08-19T16:50:38.546,ns_1@10.242.238.88:<0.10529.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.546,ns_1@10.242.238.88:<0.10529.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:38.546,ns_1@10.242.238.88:<0.10528.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 418 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.547,ns_1@10.242.238.88:<0.10534.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 418 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.547,ns_1@10.242.238.88:<0.10535.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 418 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.547,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,876,693,565,510,382,1004,927,799, 744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824, 641,513,458,330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849, 666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563, 508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 377,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402, 274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244, 116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166, 894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919, 791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944, 816,761,633,450,322,995,867,684,556,501,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580, 397,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575, 392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600, 417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442, 314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156, 884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909, 781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806, 
751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648, 520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230, 830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880, 569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930, 619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980, 669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774, 719,408,953,642] [ns_server:debug,2014-08-19T16:50:38.550,ns_1@10.242.238.88:<0.10536.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 418 into 'ns_1@10.242.238.90' is <18125.24322.0> [ns_server:debug,2014-08-19T16:50:38.552,ns_1@10.242.238.88:<0.10536.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 418 into 'ns_1@10.242.238.89' is <18124.28060.0> [rebalance:debug,2014-08-19T16:50:38.552,ns_1@10.242.238.88:<0.10528.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 418 is <0.10536.1> [ns_server:debug,2014-08-19T16:50:38.582,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,573125}, tap_estimate, {replica_building,"default",418,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24322.0>, <<"replication_building_418_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.598,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,589947}, tap_estimate, {replica_building,"default",418,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28060.0>, <<"replication_building_418_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.599,ns_1@10.242.238.88:<0.10537.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28060.0>}, {'ns_1@10.242.238.90',<18125.24322.0>}]) [rebalance:info,2014-08-19T16:50:38.599,ns_1@10.242.238.88:<0.10528.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:38.600,ns_1@10.242.238.88:<0.10528.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 418 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.600,ns_1@10.242.238.88:<0.10528.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.601,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:38.604,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.604,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10549.1>) [ns_server:debug,2014-08-19T16:50:38.604,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 928) [ns_server:debug,2014-08-19T16:50:38.605,ns_1@10.242.238.88:<0.10550.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for 
inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.605,ns_1@10.242.238.88:<0.10550.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:38.605,ns_1@10.242.238.88:<0.10549.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 928 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.605,ns_1@10.242.238.88:<0.10555.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 928 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.605,ns_1@10.242.238.88:<0.10556.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 928 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.609,ns_1@10.242.238.88:<0.10557.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 928 into 'ns_1@10.242.238.89' is <18124.28080.0> [ns_server:debug,2014-08-19T16:50:38.612,ns_1@10.242.238.88:<0.10557.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 928 into 'ns_1@10.242.238.91' is <18126.26724.0> [rebalance:debug,2014-08-19T16:50:38.612,ns_1@10.242.238.88:<0.10549.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 928 is <0.10557.1> [views:debug,2014-08-19T16:50:38.621,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/375. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.621,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",375,active,0} [ns_server:debug,2014-08-19T16:50:38.640,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,631682}, tap_estimate, {replica_building,"default",928,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28080.0>, <<"replication_building_928_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.653,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,644894}, tap_estimate, {replica_building,"default",928,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26724.0>, <<"replication_building_928_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:38.654,ns_1@10.242.238.88:<0.10558.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26724.0>}, {'ns_1@10.242.238.89',<18124.28080.0>}]) [rebalance:info,2014-08-19T16:50:38.654,ns_1@10.242.238.88:<0.10549.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:38.655,ns_1@10.242.238.88:<0.10549.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 928 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.655,ns_1@10.242.238.88:<0.10549.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.656,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:50:38.659,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.659,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10570.1>) [ns_server:debug,2014-08-19T16:50:38.659,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 673) [ns_server:debug,2014-08-19T16:50:38.660,ns_1@10.242.238.88:<0.10571.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.660,ns_1@10.242.238.88:<0.10571.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:38.660,ns_1@10.242.238.88:<0.10570.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 673 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.660,ns_1@10.242.238.88:<0.10576.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 673 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.661,ns_1@10.242.238.88:<0.10577.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 673 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.667,ns_1@10.242.238.88:<0.10578.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 673 into 'ns_1@10.242.238.89' is <18124.28085.0> [ns_server:debug,2014-08-19T16:50:38.670,ns_1@10.242.238.88:<0.10578.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 673 into 'ns_1@10.242.238.90' is <18125.24341.0> [rebalance:debug,2014-08-19T16:50:38.670,ns_1@10.242.238.88:<0.10570.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 673 is <0.10578.1> [ns_server:debug,2014-08-19T16:50:38.702,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,693927}, tap_estimate, {replica_building,"default",673,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28085.0>, <<"replication_building_673_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.712,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,703563}, tap_estimate, {replica_building,"default",673,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24341.0>, <<"replication_building_673_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.713,ns_1@10.242.238.88:<0.10579.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24341.0>}, {'ns_1@10.242.238.89',<18124.28085.0>}]) [rebalance:info,2014-08-19T16:50:38.713,ns_1@10.242.238.88:<0.10570.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:38.713,ns_1@10.242.238.88:<0.10570.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 673 on 
ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.714,ns_1@10.242.238.88:<0.10570.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.714,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.718,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:38.718,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10605.1>) [ns_server:debug,2014-08-19T16:50:38.718,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 417) [ns_server:debug,2014-08-19T16:50:38.718,ns_1@10.242.238.88:<0.10606.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.718,ns_1@10.242.238.88:<0.10606.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:38.719,ns_1@10.242.238.88:<0.10605.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 417 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.719,ns_1@10.242.238.88:<0.10611.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 417 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.719,ns_1@10.242.238.88:<0.10612.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 417 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.722,ns_1@10.242.238.88:<0.10613.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 417 into 'ns_1@10.242.238.90' is <18125.24347.0> [ns_server:debug,2014-08-19T16:50:38.724,ns_1@10.242.238.88:<0.10613.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 417 into 'ns_1@10.242.238.89' is <18124.28090.0> [rebalance:debug,2014-08-19T16:50:38.724,ns_1@10.242.238.88:<0.10605.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 417 is <0.10613.1> [ns_server:debug,2014-08-19T16:50:38.754,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,744969}, tap_estimate, {replica_building,"default",417,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24347.0>, <<"replication_building_417_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.767,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,758192}, tap_estimate, {replica_building,"default",417,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28090.0>, <<"replication_building_417_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.767,ns_1@10.242.238.88:<0.10614.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28090.0>}, 
{'ns_1@10.242.238.90',<18125.24347.0>}]) [rebalance:info,2014-08-19T16:50:38.767,ns_1@10.242.238.88:<0.10605.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:38.768,ns_1@10.242.238.88:<0.10605.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 417 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.768,ns_1@10.242.238.88:<0.10605.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.769,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:38.772,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.772,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10626.1>) [ns_server:debug,2014-08-19T16:50:38.773,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 927) [ns_server:debug,2014-08-19T16:50:38.773,ns_1@10.242.238.88:<0.10627.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.773,ns_1@10.242.238.88:<0.10627.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:38.773,ns_1@10.242.238.88:<0.10626.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 927 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.773,ns_1@10.242.238.88:<0.10632.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 927 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.774,ns_1@10.242.238.88:<0.10633.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 927 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.777,ns_1@10.242.238.88:<0.10634.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 927 into 'ns_1@10.242.238.89' is <18124.28110.0> [ns_server:debug,2014-08-19T16:50:38.779,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 373. Nacking mccouch update. [views:debug,2014-08-19T16:50:38.779,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/373. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.779,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",373,active,0} [ns_server:debug,2014-08-19T16:50:38.780,ns_1@10.242.238.88:<0.10634.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 927 into 'ns_1@10.242.238.91' is <18126.26744.0> [rebalance:debug,2014-08-19T16:50:38.780,ns_1@10.242.238.88:<0.10626.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 927 is <0.10634.1> [ns_server:debug,2014-08-19T16:50:38.781,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,876,693,565,510,382,1004,927,799, 744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824, 641,513,458,330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849, 666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563, 508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 377,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402, 274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244, 116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166, 894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919, 791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944, 816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,397,214,1019,942,814,759,631,448,320,993,865,682,554,499,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467, 156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364, 909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934, 806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831, 
648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596, 230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646, 880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007, 930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124, 980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174, 774,719,408,953,642] [ns_server:debug,2014-08-19T16:50:38.810,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,801166}, tap_estimate, {replica_building,"default",927,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28110.0>, <<"replication_building_927_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.822,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,813323}, tap_estimate, {replica_building,"default",927,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26744.0>, <<"replication_building_927_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:38.822,ns_1@10.242.238.88:<0.10635.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26744.0>}, {'ns_1@10.242.238.89',<18124.28110.0>}]) [rebalance:info,2014-08-19T16:50:38.823,ns_1@10.242.238.88:<0.10626.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:38.823,ns_1@10.242.238.88:<0.10626.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 927 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.824,ns_1@10.242.238.88:<0.10626.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.824,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.828,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.828,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10647.1>) [ns_server:debug,2014-08-19T16:50:38.828,ns_1@10.242.238.88:<0.10648.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.829,ns_1@10.242.238.88:<0.10648.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:38.829,ns_1@10.242.238.88:<0.10647.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 672 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.829,ns_1@10.242.238.88:<0.10653.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 672 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.829,ns_1@10.242.238.88:<0.10654.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 672 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.834,ns_1@10.242.238.88:<0.10655.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 672 into 'ns_1@10.242.238.89' is <18124.28115.0> [ns_server:debug,2014-08-19T16:50:38.836,ns_1@10.242.238.88:<0.10655.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 672 into 'ns_1@10.242.238.90' is <18125.24366.0> [rebalance:debug,2014-08-19T16:50:38.836,ns_1@10.242.238.88:<0.10647.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 672 is <0.10655.1> [ns_server:debug,2014-08-19T16:50:38.836,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 672) [views:debug,2014-08-19T16:50:38.846,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/373. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:38.846,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",373,active,0} [ns_server:debug,2014-08-19T16:50:38.866,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,857913}, tap_estimate, {replica_building,"default",672,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28115.0>, <<"replication_building_672_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.878,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,869733}, tap_estimate, {replica_building,"default",672,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24366.0>, <<"replication_building_672_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.879,ns_1@10.242.238.88:<0.10656.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24366.0>}, {'ns_1@10.242.238.89',<18124.28115.0>}]) [rebalance:info,2014-08-19T16:50:38.879,ns_1@10.242.238.88:<0.10647.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:38.880,ns_1@10.242.238.88:<0.10647.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 672 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.880,ns_1@10.242.238.88:<0.10647.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.881,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:50:38.884,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:38.884,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10668.1>) [ns_server:debug,2014-08-19T16:50:38.885,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 416) [ns_server:debug,2014-08-19T16:50:38.885,ns_1@10.242.238.88:<0.10669.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.885,ns_1@10.242.238.88:<0.10669.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:38.885,ns_1@10.242.238.88:<0.10668.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 416 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.885,ns_1@10.242.238.88:<0.10674.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 416 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.885,ns_1@10.242.238.88:<0.10675.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 416 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.889,ns_1@10.242.238.88:<0.10676.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 416 into 'ns_1@10.242.238.90' is <18125.24372.0> [ns_server:debug,2014-08-19T16:50:38.891,ns_1@10.242.238.88:<0.10676.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 416 into 'ns_1@10.242.238.89' is <18124.28134.0> [rebalance:debug,2014-08-19T16:50:38.891,ns_1@10.242.238.88:<0.10668.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 416 is <0.10676.1> [ns_server:debug,2014-08-19T16:50:38.920,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,911179}, tap_estimate, {replica_building,"default",416,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24372.0>, <<"replication_building_416_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:38.934,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,925272}, tap_estimate, {replica_building,"default",416,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28134.0>, <<"replication_building_416_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.934,ns_1@10.242.238.88:<0.10677.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28134.0>}, {'ns_1@10.242.238.90',<18125.24372.0>}]) [rebalance:info,2014-08-19T16:50:38.934,ns_1@10.242.238.88:<0.10668.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:38.935,ns_1@10.242.238.88:<0.10668.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 416 on 
ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.935,ns_1@10.242.238.88:<0.10668.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.936,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:38.939,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.939,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10703.1>) [ns_server:debug,2014-08-19T16:50:38.939,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 926) [ns_server:debug,2014-08-19T16:50:38.940,ns_1@10.242.238.88:<0.10704.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.940,ns_1@10.242.238.88:<0.10704.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:38.940,ns_1@10.242.238.88:<0.10703.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 926 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.940,ns_1@10.242.238.88:<0.10709.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 926 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.940,ns_1@10.242.238.88:<0.10710.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 926 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:38.944,ns_1@10.242.238.88:<0.10711.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 926 into 'ns_1@10.242.238.89' is <18124.28140.0> [ns_server:debug,2014-08-19T16:50:38.947,ns_1@10.242.238.88:<0.10711.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 926 into 'ns_1@10.242.238.91' is <18126.26764.0> [rebalance:debug,2014-08-19T16:50:38.947,ns_1@10.242.238.88:<0.10703.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 926 is <0.10711.1> [ns_server:debug,2014-08-19T16:50:38.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,968915}, tap_estimate, {replica_building,"default",926,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28140.0>, <<"replication_building_926_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:38.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452638,980907}, tap_estimate, {replica_building,"default",926,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26764.0>, <<"replication_building_926_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:38.990,ns_1@10.242.238.88:<0.10712.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26764.0>}, 
{'ns_1@10.242.238.89',<18124.28140.0>}]) [rebalance:info,2014-08-19T16:50:38.990,ns_1@10.242.238.88:<0.10703.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:38.991,ns_1@10.242.238.88:<0.10703.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 926 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:38.991,ns_1@10.242.238.88:<0.10703.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:38.992,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:38.995,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:38.995,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10724.1>) [ns_server:debug,2014-08-19T16:50:38.995,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 671) [ns_server:debug,2014-08-19T16:50:38.996,ns_1@10.242.238.88:<0.10725.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:38.996,ns_1@10.242.238.88:<0.10725.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:38.996,ns_1@10.242.238.88:<0.10724.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 671 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:38.997,ns_1@10.242.238.88:<0.10730.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 671 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:38.997,ns_1@10.242.238.88:<0.10731.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 671 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.000,ns_1@10.242.238.88:<0.10732.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 671 into 'ns_1@10.242.238.89' is <18124.28159.0> [ns_server:debug,2014-08-19T16:50:39.003,ns_1@10.242.238.88:<0.10732.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 671 into 'ns_1@10.242.238.90' is <18125.24391.0> [rebalance:debug,2014-08-19T16:50:39.003,ns_1@10.242.238.88:<0.10724.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 671 is <0.10732.1> [ns_server:debug,2014-08-19T16:50:39.014,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 371. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.014,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/371. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.014,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",371,active,0} [ns_server:debug,2014-08-19T16:50:39.016,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,876,693,565,510,382,1004,927,799, 744,616,433,250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824, 641,513,458,330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849, 666,538,483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563, 508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638, 455,144,872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 377,194,922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402, 274,947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244, 116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166, 894,711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919, 791,736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944, 816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969, 841,658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866, 683,555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708, 580,397,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788, 733,605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758, 630,447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655, 527,472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552, 497,186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266, 1016,939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522, 467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492, 364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011, 934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959, 831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907, 596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957, 646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385, 1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435, 124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485, 
174,774,719,408,953,642] [ns_server:debug,2014-08-19T16:50:39.037,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,28539}, tap_estimate, {replica_building,"default",671,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28159.0>, <<"replication_building_671_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.045,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,36591}, tap_estimate, {replica_building,"default",671,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24391.0>, <<"replication_building_671_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.046,ns_1@10.242.238.88:<0.10733.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24391.0>}, {'ns_1@10.242.238.89',<18124.28159.0>}]) [rebalance:info,2014-08-19T16:50:39.046,ns_1@10.242.238.88:<0.10724.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:39.046,ns_1@10.242.238.88:<0.10724.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 671 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.047,ns_1@10.242.238.88:<0.10724.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.047,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.051,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:39.051,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10745.1>) [ns_server:debug,2014-08-19T16:50:39.051,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 415) [ns_server:debug,2014-08-19T16:50:39.051,ns_1@10.242.238.88:<0.10746.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.052,ns_1@10.242.238.88:<0.10746.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:39.052,ns_1@10.242.238.88:<0.10745.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 415 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.052,ns_1@10.242.238.88:<0.10751.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 415 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.052,ns_1@10.242.238.88:<0.10752.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 415 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.056,ns_1@10.242.238.88:<0.10753.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 415 into 'ns_1@10.242.238.90' is <18125.24411.0> [ns_server:debug,2014-08-19T16:50:39.057,ns_1@10.242.238.88:<0.10753.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 415 into 'ns_1@10.242.238.89' is <18124.28165.0> [rebalance:debug,2014-08-19T16:50:39.057,ns_1@10.242.238.88:<0.10745.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 415 is <0.10753.1> [views:debug,2014-08-19T16:50:39.073,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/371. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.073,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",371,active,0} [ns_server:debug,2014-08-19T16:50:39.086,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,77373}, tap_estimate, {replica_building,"default",415,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24411.0>, <<"replication_building_415_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.103,ns_1@10.242.238.88:<0.10754.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28165.0>}, {'ns_1@10.242.238.90',<18125.24411.0>}]) [rebalance:info,2014-08-19T16:50:39.103,ns_1@10.242.238.88:<0.10745.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:39.103,ns_1@10.242.238.88:<0.10745.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 415 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.104,ns_1@10.242.238.88:<0.10745.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.104,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:39.108,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.108,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10766.1>) 
[ns_server:debug,2014-08-19T16:50:39.108,ns_1@10.242.238.88:<0.10767.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.108,ns_1@10.242.238.88:<0.10767.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:39.108,ns_1@10.242.238.88:<0.10766.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 925 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [ns_server:debug,2014-08-19T16:50:39.108,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,93634}, tap_estimate, {replica_building,"default",415,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28165.0>, <<"replication_building_415_'ns_1@10.242.238.89'">>} [rebalance:info,2014-08-19T16:50:39.108,ns_1@10.242.238.88:<0.10772.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 925 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.109,ns_1@10.242.238.88:<0.10773.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 925 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.109,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 925) [ns_server:debug,2014-08-19T16:50:39.112,ns_1@10.242.238.88:<0.10774.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 925 into 'ns_1@10.242.238.89' is <18124.28185.0> [ns_server:debug,2014-08-19T16:50:39.115,ns_1@10.242.238.88:<0.10774.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 925 into 'ns_1@10.242.238.91' is <18126.26784.0> [rebalance:debug,2014-08-19T16:50:39.115,ns_1@10.242.238.88:<0.10766.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 925 is <0.10774.1> [ns_server:debug,2014-08-19T16:50:39.144,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,135630}, tap_estimate, {replica_building,"default",925,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28185.0>, <<"replication_building_925_'ns_1@10.242.238.89'">>} [ns_server:info,2014-08-19T16:50:39.155,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:50:39.159,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,150896}, tap_estimate, {replica_building,"default",925,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26784.0>, <<"replication_building_925_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:39.160,ns_1@10.242.238.88:<0.10775.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26784.0>}, {'ns_1@10.242.238.89',<18124.28185.0>}]) [rebalance:info,2014-08-19T16:50:39.160,ns_1@10.242.238.88:<0.10766.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:39.161,ns_1@10.242.238.88:<0.10766.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 925 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:50:39.161,ns_1@10.242.238.88:<0.10766.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.162,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.165,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.165,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10800.1>) [ns_server:debug,2014-08-19T16:50:39.165,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 670) [ns_server:debug,2014-08-19T16:50:39.166,ns_1@10.242.238.88:<0.10801.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.166,ns_1@10.242.238.88:<0.10801.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:39.166,ns_1@10.242.238.88:<0.10800.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 670 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.166,ns_1@10.242.238.88:<0.10808.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 670 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.166,ns_1@10.242.238.88:<0.10809.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 670 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.170,ns_1@10.242.238.88:<0.10814.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 670 into 'ns_1@10.242.238.89' is <18124.28190.0> [ns_server:debug,2014-08-19T16:50:39.172,ns_1@10.242.238.88:<0.10814.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 670 into 'ns_1@10.242.238.90' is <18125.24430.0> [rebalance:debug,2014-08-19T16:50:39.172,ns_1@10.242.238.88:<0.10800.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 670 is <0.10814.1> [ns_server:debug,2014-08-19T16:50:39.202,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,193134}, tap_estimate, {replica_building,"default",670,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28190.0>, <<"replication_building_670_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.215,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,206867}, tap_estimate, {replica_building,"default",670,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24430.0>, <<"replication_building_670_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.216,ns_1@10.242.238.88:<0.10815.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24430.0>}, {'ns_1@10.242.238.89',<18124.28190.0>}]) 
[rebalance:info,2014-08-19T16:50:39.216,ns_1@10.242.238.88:<0.10800.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:39.217,ns_1@10.242.238.88:<0.10800.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 670 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.217,ns_1@10.242.238.88:<0.10800.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.218,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.221,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:39.221,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10827.1>) [ns_server:debug,2014-08-19T16:50:39.221,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 414) [ns_server:debug,2014-08-19T16:50:39.222,ns_1@10.242.238.88:<0.10828.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.222,ns_1@10.242.238.88:<0.10828.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:39.222,ns_1@10.242.238.88:<0.10827.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 414 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.222,ns_1@10.242.238.88:<0.10833.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 414 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.222,ns_1@10.242.238.88:<0.10834.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 414 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.226,ns_1@10.242.238.88:<0.10835.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 414 into 'ns_1@10.242.238.90' is <18125.24436.0> [ns_server:debug,2014-08-19T16:50:39.228,ns_1@10.242.238.88:<0.10835.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 414 into 'ns_1@10.242.238.89' is <18124.28195.0> [rebalance:debug,2014-08-19T16:50:39.228,ns_1@10.242.238.88:<0.10827.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 414 is <0.10835.1> [ns_server:debug,2014-08-19T16:50:39.256,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 369. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.257,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/369. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.257,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",369,active,0} [ns_server:debug,2014-08-19T16:50:39.257,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,248394}, tap_estimate, {replica_building,"default",414,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24436.0>, <<"replication_building_414_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.259,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,693,382,1004,927,799,744,616,433, 250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458, 330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483, 172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002, 925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872, 689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897, 769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947,819, 764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844, 661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686, 558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575, 392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600, 417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442, 314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467,156, 884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909, 781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806, 751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831,648, 520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,230, 
830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,880, 569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930, 619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124,980, 669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774, 719,408,953,642,876,565,510] [ns_server:debug,2014-08-19T16:50:39.271,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,262935}, tap_estimate, {replica_building,"default",414,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28195.0>, <<"replication_building_414_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.272,ns_1@10.242.238.88:<0.10836.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28195.0>}, {'ns_1@10.242.238.90',<18125.24436.0>}]) [rebalance:info,2014-08-19T16:50:39.272,ns_1@10.242.238.88:<0.10827.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:39.273,ns_1@10.242.238.88:<0.10827.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 414 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.273,ns_1@10.242.238.88:<0.10827.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.274,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:39.277,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.277,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10848.1>) [ns_server:debug,2014-08-19T16:50:39.278,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 924) [ns_server:debug,2014-08-19T16:50:39.278,ns_1@10.242.238.88:<0.10849.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.278,ns_1@10.242.238.88:<0.10849.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:39.278,ns_1@10.242.238.88:<0.10848.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 924 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.279,ns_1@10.242.238.88:<0.10854.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 924 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.279,ns_1@10.242.238.88:<0.10855.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 924 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.282,ns_1@10.242.238.88:<0.10856.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 924 into 'ns_1@10.242.238.89' is <18124.28215.0> [ns_server:debug,2014-08-19T16:50:39.285,ns_1@10.242.238.88:<0.10856.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 924 into 'ns_1@10.242.238.91' is <18126.26804.0> [rebalance:debug,2014-08-19T16:50:39.285,ns_1@10.242.238.88:<0.10848.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 924 is <0.10856.1> [ns_server:debug,2014-08-19T16:50:39.314,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,305141}, tap_estimate, {replica_building,"default",924,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28215.0>, <<"replication_building_924_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.327,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,318603}, tap_estimate, {replica_building,"default",924,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26804.0>, <<"replication_building_924_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:39.328,ns_1@10.242.238.88:<0.10857.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26804.0>}, {'ns_1@10.242.238.89',<18124.28215.0>}]) [rebalance:info,2014-08-19T16:50:39.328,ns_1@10.242.238.88:<0.10848.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:39.329,ns_1@10.242.238.88:<0.10848.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 924 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.329,ns_1@10.242.238.88:<0.10848.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.330,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.333,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.333,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10869.1>) 
[ns_server:debug,2014-08-19T16:50:39.333,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 669) [ns_server:debug,2014-08-19T16:50:39.334,ns_1@10.242.238.88:<0.10870.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.334,ns_1@10.242.238.88:<0.10870.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:39.334,ns_1@10.242.238.88:<0.10869.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 669 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.334,ns_1@10.242.238.88:<0.10875.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 669 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.334,ns_1@10.242.238.88:<0.10876.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 669 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.338,ns_1@10.242.238.88:<0.10877.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 669 into 'ns_1@10.242.238.89' is <18124.28220.0> [views:debug,2014-08-19T16:50:39.340,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/369. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.341,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",369,active,0} [ns_server:debug,2014-08-19T16:50:39.342,ns_1@10.242.238.88:<0.10877.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 669 into 'ns_1@10.242.238.90' is <18125.24455.0> [rebalance:debug,2014-08-19T16:50:39.342,ns_1@10.242.238.88:<0.10869.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 669 is <0.10877.1> [ns_server:debug,2014-08-19T16:50:39.370,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,361900}, tap_estimate, {replica_building,"default",669,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28220.0>, <<"replication_building_669_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.383,ns_1@10.242.238.88:<0.10878.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24455.0>}, {'ns_1@10.242.238.89',<18124.28220.0>}]) [rebalance:info,2014-08-19T16:50:39.383,ns_1@10.242.238.88:<0.10869.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:39.384,ns_1@10.242.238.88:<0.10869.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 669 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.384,ns_1@10.242.238.88:<0.10869.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.384,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,374105}, tap_estimate, {replica_building,"default",669,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24455.0>, 
<<"replication_building_669_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.385,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.388,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:39.388,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10890.1>) [ns_server:debug,2014-08-19T16:50:39.389,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 413) [ns_server:debug,2014-08-19T16:50:39.389,ns_1@10.242.238.88:<0.10891.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.389,ns_1@10.242.238.88:<0.10891.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:39.389,ns_1@10.242.238.88:<0.10890.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 413 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.389,ns_1@10.242.238.88:<0.10896.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 413 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.389,ns_1@10.242.238.88:<0.10897.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 413 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.393,ns_1@10.242.238.88:<0.10898.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 413 into 'ns_1@10.242.238.90' is <18125.24461.0> [ns_server:debug,2014-08-19T16:50:39.395,ns_1@10.242.238.88:<0.10898.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 413 into 'ns_1@10.242.238.89' is <18124.28225.0> [rebalance:debug,2014-08-19T16:50:39.396,ns_1@10.242.238.88:<0.10890.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 413 is <0.10898.1> [ns_server:debug,2014-08-19T16:50:39.426,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,417022}, tap_estimate, {replica_building,"default",413,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24461.0>, <<"replication_building_413_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.438,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,429123}, tap_estimate, {replica_building,"default",413,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28225.0>, <<"replication_building_413_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.438,ns_1@10.242.238.88:<0.10899.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28225.0>}, {'ns_1@10.242.238.90',<18125.24461.0>}]) 
[rebalance:info,2014-08-19T16:50:39.438,ns_1@10.242.238.88:<0.10890.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:39.439,ns_1@10.242.238.88:<0.10890.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 413 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.439,ns_1@10.242.238.88:<0.10890.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.440,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:39.443,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.443,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.10925.1>) [ns_server:debug,2014-08-19T16:50:39.443,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 923) [ns_server:debug,2014-08-19T16:50:39.444,ns_1@10.242.238.88:<0.10926.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.444,ns_1@10.242.238.88:<0.10926.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:39.444,ns_1@10.242.238.88:<0.10925.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 923 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.444,ns_1@10.242.238.88:<0.10931.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 923 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.444,ns_1@10.242.238.88:<0.10932.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 923 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.448,ns_1@10.242.238.88:<0.10933.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 923 into 'ns_1@10.242.238.89' is <18124.28231.0> [ns_server:debug,2014-08-19T16:50:39.450,ns_1@10.242.238.88:<0.10933.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 923 into 'ns_1@10.242.238.91' is <18126.26824.0> [rebalance:debug,2014-08-19T16:50:39.450,ns_1@10.242.238.88:<0.10925.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 923 is <0.10933.1> [ns_server:debug,2014-08-19T16:50:39.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 367. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.462,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/367. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.462,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",367,active,0} [ns_server:debug,2014-08-19T16:50:39.464,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,693,382,1004,927,799,744,616,433, 250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458, 330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483, 172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002, 925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872, 689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897, 769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947,819, 764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844, 661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686, 558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,182,910,782,727,599,416,288,961,833,650,522,467, 156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364, 909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934, 806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959,831, 648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596, 230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646, 880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007, 930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124, 980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174, 
774,719,408,953,642,876,565,510] [ns_server:debug,2014-08-19T16:50:39.478,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,469744}, tap_estimate, {replica_building,"default",923,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28231.0>, <<"replication_building_923_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.491,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,482832}, tap_estimate, {replica_building,"default",923,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26824.0>, <<"replication_building_923_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:39.492,ns_1@10.242.238.88:<0.10934.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26824.0>}, {'ns_1@10.242.238.89',<18124.28231.0>}]) [rebalance:info,2014-08-19T16:50:39.492,ns_1@10.242.238.88:<0.10925.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:39.493,ns_1@10.242.238.88:<0.10925.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 923 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.493,ns_1@10.242.238.88:<0.10925.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.494,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.497,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.497,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.10946.1>) [ns_server:debug,2014-08-19T16:50:39.498,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 668) [ns_server:debug,2014-08-19T16:50:39.498,ns_1@10.242.238.88:<0.10947.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.498,ns_1@10.242.238.88:<0.10947.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:39.498,ns_1@10.242.238.88:<0.10946.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 668 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.498,ns_1@10.242.238.88:<0.10952.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 668 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.498,ns_1@10.242.238.88:<0.10953.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 668 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.503,ns_1@10.242.238.88:<0.10954.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 668 into 'ns_1@10.242.238.89' is <18124.28250.0> [ns_server:debug,2014-08-19T16:50:39.506,ns_1@10.242.238.88:<0.10954.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 668 into 'ns_1@10.242.238.90' is <18125.24486.0> [rebalance:debug,2014-08-19T16:50:39.506,ns_1@10.242.238.88:<0.10946.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 668 is <0.10954.1> [views:debug,2014-08-19T16:50:39.521,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/367. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.521,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",367,active,0} [ns_server:debug,2014-08-19T16:50:39.534,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,525887}, tap_estimate, {replica_building,"default",668,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28250.0>, <<"replication_building_668_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.547,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,538799}, tap_estimate, {replica_building,"default",668,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24486.0>, <<"replication_building_668_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.548,ns_1@10.242.238.88:<0.10955.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24486.0>}, {'ns_1@10.242.238.89',<18124.28250.0>}]) [rebalance:info,2014-08-19T16:50:39.548,ns_1@10.242.238.88:<0.10946.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:39.549,ns_1@10.242.238.88:<0.10946.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 668 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.549,ns_1@10.242.238.88:<0.10946.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.549,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.553,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:39.553,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.10967.1>) [ns_server:debug,2014-08-19T16:50:39.553,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 412) [ns_server:debug,2014-08-19T16:50:39.553,ns_1@10.242.238.88:<0.10968.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.554,ns_1@10.242.238.88:<0.10968.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:39.554,ns_1@10.242.238.88:<0.10967.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 412 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.554,ns_1@10.242.238.88:<0.10973.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 412 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.554,ns_1@10.242.238.88:<0.10974.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 412 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.558,ns_1@10.242.238.88:<0.10975.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 412 into 'ns_1@10.242.238.90' is <18125.24493.0> [ns_server:debug,2014-08-19T16:50:39.560,ns_1@10.242.238.88:<0.10975.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 412 into 'ns_1@10.242.238.89' is <18124.28255.0> [rebalance:debug,2014-08-19T16:50:39.560,ns_1@10.242.238.88:<0.10967.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 412 is <0.10975.1> [ns_server:debug,2014-08-19T16:50:39.590,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,581675}, tap_estimate, {replica_building,"default",412,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24493.0>, <<"replication_building_412_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.602,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,593887}, tap_estimate, {replica_building,"default",412,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28255.0>, <<"replication_building_412_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.603,ns_1@10.242.238.88:<0.10984.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28255.0>}, {'ns_1@10.242.238.90',<18125.24493.0>}]) [rebalance:info,2014-08-19T16:50:39.603,ns_1@10.242.238.88:<0.10967.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:39.604,ns_1@10.242.238.88:<0.10967.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 412 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.604,ns_1@10.242.238.88:<0.10967.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:39.605,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:39.608,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.608,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.11002.1>) [ns_server:debug,2014-08-19T16:50:39.608,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 922) [ns_server:debug,2014-08-19T16:50:39.609,ns_1@10.242.238.88:<0.11003.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.609,ns_1@10.242.238.88:<0.11003.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:39.609,ns_1@10.242.238.88:<0.11002.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 922 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.609,ns_1@10.242.238.88:<0.11008.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 922 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.609,ns_1@10.242.238.88:<0.11009.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 922 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.613,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 365. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.613,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/365. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.613,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",365,active,0} [ns_server:debug,2014-08-19T16:50:39.613,ns_1@10.242.238.88:<0.11010.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 922 into 'ns_1@10.242.238.89' is <18124.28261.0> [ns_server:debug,2014-08-19T16:50:39.615,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,693,382,1004,927,799,744,616,433, 250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458, 330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483, 172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002, 925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872, 689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897, 769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947,819, 764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844, 661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686, 558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522, 467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492, 364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011, 934,806,751,623,440,312,985,857,674,546,491,180,908,780,725,597,414,286,959, 831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907, 596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957, 
646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385, 1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435, 124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485, 174,774,719,408,953,642,876,565,510] [ns_server:debug,2014-08-19T16:50:39.615,ns_1@10.242.238.88:<0.11010.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 922 into 'ns_1@10.242.238.91' is <18126.26858.0> [rebalance:debug,2014-08-19T16:50:39.616,ns_1@10.242.238.88:<0.11002.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 922 is <0.11010.1> [ns_server:debug,2014-08-19T16:50:39.643,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,634608}, tap_estimate, {replica_building,"default",922,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28261.0>, <<"replication_building_922_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.658,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,649340}, tap_estimate, {replica_building,"default",922,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26858.0>, <<"replication_building_922_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:39.658,ns_1@10.242.238.88:<0.11011.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26858.0>}, {'ns_1@10.242.238.89',<18124.28261.0>}]) [rebalance:info,2014-08-19T16:50:39.659,ns_1@10.242.238.88:<0.11002.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:39.659,ns_1@10.242.238.88:<0.11002.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 922 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.660,ns_1@10.242.238.88:<0.11002.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.660,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [views:debug,2014-08-19T16:50:39.663,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/365. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.663,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",365,active,0} [ns_server:debug,2014-08-19T16:50:39.664,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.664,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.11023.1>) [ns_server:debug,2014-08-19T16:50:39.665,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 667) [ns_server:debug,2014-08-19T16:50:39.665,ns_1@10.242.238.88:<0.11024.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.665,ns_1@10.242.238.88:<0.11024.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:39.665,ns_1@10.242.238.88:<0.11023.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 667 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.665,ns_1@10.242.238.88:<0.11029.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 667 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.665,ns_1@10.242.238.88:<0.11030.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 667 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.669,ns_1@10.242.238.88:<0.11031.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 667 into 'ns_1@10.242.238.89' is <18124.28280.0> [ns_server:debug,2014-08-19T16:50:39.672,ns_1@10.242.238.88:<0.11031.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 667 into 'ns_1@10.242.238.90' is <18125.24498.0> [rebalance:debug,2014-08-19T16:50:39.672,ns_1@10.242.238.88:<0.11023.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 667 is <0.11031.1> [ns_server:debug,2014-08-19T16:50:39.700,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,691788}, tap_estimate, {replica_building,"default",667,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28280.0>, <<"replication_building_667_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.714,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,705118}, tap_estimate, {replica_building,"default",667,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24498.0>, <<"replication_building_667_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.714,ns_1@10.242.238.88:<0.11032.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24498.0>}, {'ns_1@10.242.238.89',<18124.28280.0>}]) [rebalance:info,2014-08-19T16:50:39.715,ns_1@10.242.238.88:<0.11023.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:39.715,ns_1@10.242.238.88:<0.11023.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 667 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.716,ns_1@10.242.238.88:<0.11023.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.716,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.719,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:39.720,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",411, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.11058.1>) [ns_server:debug,2014-08-19T16:50:39.720,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 411) [ns_server:debug,2014-08-19T16:50:39.720,ns_1@10.242.238.88:<0.11059.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.720,ns_1@10.242.238.88:<0.11059.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:39.720,ns_1@10.242.238.88:<0.11058.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 411 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.720,ns_1@10.242.238.88:<0.11064.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 411 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.721,ns_1@10.242.238.88:<0.11065.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 411 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.724,ns_1@10.242.238.88:<0.11066.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 411 into 'ns_1@10.242.238.90' is <18125.24518.0> [rebalance:info,2014-08-19T16:50:39.725,ns_1@10.242.238.88:<0.9904.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 936 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:39.725,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 936 state to active [rebalance:info,2014-08-19T16:50:39.726,ns_1@10.242.238.88:<0.9904.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 936 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.727,ns_1@10.242.238.88:<0.9904.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.727,ns_1@10.242.238.88:<0.11066.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 411 into 'ns_1@10.242.238.89' is <18124.28285.0> [rebalance:debug,2014-08-19T16:50:39.727,ns_1@10.242.238.88:<0.11058.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 411 is <0.11066.1> [ns_server:debug,2014-08-19T16:50:39.739,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 363. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.740,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/363. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.740,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",363,active,0} [ns_server:debug,2014-08-19T16:50:39.742,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722,411, 956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150,695, 384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200,800, 745,434,979,668,902,591,280,825,514,459,148,693,382,1004,927,799,744,616,433, 250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458, 330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483, 172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002, 925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872, 689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897, 769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947,819, 764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844, 661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686, 558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522, 467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492, 364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011, 934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414,286, 959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362, 907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412, 957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696, 385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746, 435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540, 
485,174,774,719,408,953,642,876,565,510] [ns_server:debug,2014-08-19T16:50:39.756,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,747385}, tap_estimate, {replica_building,"default",411,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24518.0>, <<"replication_building_411_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.769,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,760639}, tap_estimate, {replica_building,"default",411,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28285.0>, <<"replication_building_411_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.770,ns_1@10.242.238.88:<0.11071.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28285.0>}, {'ns_1@10.242.238.90',<18125.24518.0>}]) [rebalance:info,2014-08-19T16:50:39.770,ns_1@10.242.238.88:<0.11058.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:39.771,ns_1@10.242.238.88:<0.11058.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 411 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.771,ns_1@10.242.238.88:<0.11058.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.771,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [views:debug,2014-08-19T16:50:39.773,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/363. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.773,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",363,active,0} [ns_server:debug,2014-08-19T16:50:39.775,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.775,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.11083.1>) [ns_server:debug,2014-08-19T16:50:39.775,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 921) [ns_server:debug,2014-08-19T16:50:39.775,ns_1@10.242.238.88:<0.11084.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.776,ns_1@10.242.238.88:<0.11084.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:39.776,ns_1@10.242.238.88:<0.11083.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 921 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.776,ns_1@10.242.238.88:<0.11089.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 921 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.776,ns_1@10.242.238.88:<0.11090.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 921 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.779,ns_1@10.242.238.88:<0.11091.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 921 into 'ns_1@10.242.238.89' is <18124.28308.0> [ns_server:debug,2014-08-19T16:50:39.782,ns_1@10.242.238.88:<0.11091.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 921 into 'ns_1@10.242.238.91' is <18126.26867.0> [rebalance:debug,2014-08-19T16:50:39.782,ns_1@10.242.238.88:<0.11083.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 921 is <0.11091.1> [ns_server:debug,2014-08-19T16:50:39.811,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,802888}, tap_estimate, {replica_building,"default",921,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28308.0>, <<"replication_building_921_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.824,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,815101}, tap_estimate, {replica_building,"default",921,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26867.0>, <<"replication_building_921_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:39.824,ns_1@10.242.238.88:<0.11092.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26867.0>}, {'ns_1@10.242.238.89',<18124.28308.0>}]) [rebalance:info,2014-08-19T16:50:39.824,ns_1@10.242.238.88:<0.11083.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:39.825,ns_1@10.242.238.88:<0.11083.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 921 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.825,ns_1@10.242.238.88:<0.11083.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.826,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:39.829,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.829,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.11118.1>) 
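
[editor's note] The ns_vbucket_mover records above repeat a fixed pattern: a "Got actions: [{move,{VBucket, OldChain, NewChain}}]" entry followed by a "Spawned single vbucket mover" entry for the same vbucket. Below is a minimal, self-contained sketch of pulling those move tuples out of this log text; the file name ns_server.debug.log and the helper names are placeholders introduced here, and nothing in it is taken from the Couchbase sources.

    #!/usr/bin/env python3
    # Editorial sketch (not part of the log): list the vbucket moves announced
    # by the "Got actions: [{move,...}]" records in this log.
    # "ns_server.debug.log" is a hypothetical placeholder path.
    import re

    MOVE_RE = re.compile(
        r"Got actions:\s*\[\{move,\{(\d+),\s*"   # vbucket id
        r"\[([^\]]*)\],\s*"                      # old chain, e.g. 'ns_1@...',undefined
        r"\[([^\]]*)\]\}\}\]")                   # new chain

    def moves(text):
        """Yield (vbucket, old_chain, new_chain) for every move action in the text."""
        for m in MOVE_RE.finditer(text):
            old = [x.strip().strip("'") for x in m.group(2).split(",")]
            new = [x.strip().strip("'") for x in m.group(3).split(",")]
            yield int(m.group(1)), old, new

    if __name__ == "__main__":
        with open("ns_server.debug.log") as f:   # hypothetical path
            for vb, old, new in moves(f.read()):
                print("vbucket %4d: %s -> %s" % (vb, old, new))

Run against this excerpt it would list moves such as vbucket 922 from ['ns_1@10.242.238.88', 'undefined'] to ['ns_1@10.242.238.91', 'ns_1@10.242.238.89'].
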
[ns_server:debug,2014-08-19T16:50:39.830,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 666) [ns_server:debug,2014-08-19T16:50:39.830,ns_1@10.242.238.88:<0.11119.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.830,ns_1@10.242.238.88:<0.11119.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:39.831,ns_1@10.242.238.88:<0.11118.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 666 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.831,ns_1@10.242.238.88:<0.11124.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 666 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.831,ns_1@10.242.238.88:<0.11125.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 666 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.835,ns_1@10.242.238.88:<0.11126.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 666 into 'ns_1@10.242.238.89' is <18124.28314.0> [ns_server:debug,2014-08-19T16:50:39.837,ns_1@10.242.238.88:<0.11126.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 666 into 'ns_1@10.242.238.90' is <18125.24523.0> [rebalance:debug,2014-08-19T16:50:39.838,ns_1@10.242.238.88:<0.11118.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 666 is <0.11126.1> [ns_server:debug,2014-08-19T16:50:39.849,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 361. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.849,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/361. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.849,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",361,active,0} [ns_server:debug,2014-08-19T16:50:39.851,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,904,593,282,827,516,461,150, 695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511,200, 800,745,434,979,668,902,591,280,825,514,459,148,693,382,1004,927,799,744,616, 433,250,122,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513, 458,330,875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538, 483,172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380, 1002,925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405, 222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430, 302,975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144, 872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352, 897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947, 819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972, 844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869, 686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711, 583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736, 608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761, 633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658, 530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547, 492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206, 1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414, 286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673, 362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723, 412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462, 696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801, 746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851, 
540,485,174,774,719,408,953,642,876,565,510] [ns_server:debug,2014-08-19T16:50:39.875,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,866774}, tap_estimate, {replica_building,"default",666,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28314.0>, <<"replication_building_666_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.880,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,871041}, tap_estimate, {replica_building,"default",666,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24523.0>, <<"replication_building_666_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.880,ns_1@10.242.238.88:<0.11127.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24523.0>}, {'ns_1@10.242.238.89',<18124.28314.0>}]) [rebalance:info,2014-08-19T16:50:39.880,ns_1@10.242.238.88:<0.11118.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:39.881,ns_1@10.242.238.88:<0.11118.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 666 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.881,ns_1@10.242.238.88:<0.11118.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.882,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [views:debug,2014-08-19T16:50:39.882,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/361. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.883,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",361,active,0} [ns_server:debug,2014-08-19T16:50:39.886,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:39.886,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.11139.1>) [ns_server:debug,2014-08-19T16:50:39.886,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 410) [ns_server:debug,2014-08-19T16:50:39.886,ns_1@10.242.238.88:<0.11140.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.887,ns_1@10.242.238.88:<0.11140.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:39.887,ns_1@10.242.238.88:<0.11139.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 410 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.887,ns_1@10.242.238.88:<0.11145.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 410 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.887,ns_1@10.242.238.88:<0.11146.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 410 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.890,ns_1@10.242.238.88:<0.11147.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 410 into 'ns_1@10.242.238.90' is <18125.24543.0> [ns_server:debug,2014-08-19T16:50:39.893,ns_1@10.242.238.88:<0.11147.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 410 into 'ns_1@10.242.238.89' is <18124.28349.0> [rebalance:debug,2014-08-19T16:50:39.893,ns_1@10.242.238.88:<0.11139.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 410 is <0.11147.1> [ns_server:debug,2014-08-19T16:50:39.921,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,912392}, tap_estimate, {replica_building,"default",410,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24543.0>, <<"replication_building_410_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:39.938,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,929922}, tap_estimate, {replica_building,"default",410,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28349.0>, <<"replication_building_410_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.939,ns_1@10.242.238.88:<0.11148.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28349.0>}, {'ns_1@10.242.238.90',<18125.24543.0>}]) [rebalance:info,2014-08-19T16:50:39.939,ns_1@10.242.238.88:<0.11139.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:39.940,ns_1@10.242.238.88:<0.11139.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 410 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:39.940,ns_1@10.242.238.88:<0.11139.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:39.941,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:39.945,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:39.945,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.11162.1>) [ns_server:debug,2014-08-19T16:50:39.946,ns_1@10.242.238.88:<0.11164.1>:ns_single_vbucket_mover:mover_inner:141]Got nack 
for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:39.946,ns_1@10.242.238.88:<0.11164.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:39.946,ns_1@10.242.238.88:<0.11162.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 920 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:39.946,ns_1@10.242.238.88:<0.11171.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 920 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:39.946,ns_1@10.242.238.88:<0.11172.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 920 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:39.952,ns_1@10.242.238.88:<0.11176.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 920 into 'ns_1@10.242.238.89' is <18124.28357.0> [ns_server:debug,2014-08-19T16:50:39.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 920) [ns_server:debug,2014-08-19T16:50:39.955,ns_1@10.242.238.88:<0.11176.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 920 into 'ns_1@10.242.238.91' is <18126.26887.0> [rebalance:debug,2014-08-19T16:50:39.956,ns_1@10.242.238.88:<0.11162.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 920 is <0.11176.1> [ns_server:debug,2014-08-19T16:50:39.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,980146}, tap_estimate, {replica_building,"default",920,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28357.0>, <<"replication_building_920_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:39.999,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 359. Nacking mccouch update. [views:debug,2014-08-19T16:50:39.999,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/359. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:39.999,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",359,active,0} [ns_server:debug,2014-08-19T16:50:39.999,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452639,990822}, tap_estimate, {replica_building,"default",920,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26887.0>, <<"replication_building_920_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:40.000,ns_1@10.242.238.88:<0.11183.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26887.0>}, {'ns_1@10.242.238.89',<18124.28357.0>}]) [rebalance:info,2014-08-19T16:50:40.000,ns_1@10.242.238.88:<0.11162.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:40.001,ns_1@10.242.238.88:<0.11162.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 920 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.001,ns_1@10.242.238.88:<0.11162.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.001,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,902,591,280,825,514,459,148,693,382,1004,927,616,250, 978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458,330,875, 692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,172,900, 772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797, 742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222,950,822, 767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847, 664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714, 586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947,819,764,636, 453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533, 478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503, 375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711,583,400,272, 1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242, 114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,164, 892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992,864, 681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889,706, 578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 
731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575,392, 264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600,417, 234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442,314, 987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522,467,156, 884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364,909, 781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934,806, 751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,959,831, 648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596, 230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646, 880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385,1007, 930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435,124, 980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174, 774,719,408,953,642,876,565,510,799,744,433,122] [ns_server:debug,2014-08-19T16:50:40.002,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.006,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.006,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.11195.1>) [ns_server:debug,2014-08-19T16:50:40.006,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 665) [ns_server:debug,2014-08-19T16:50:40.006,ns_1@10.242.238.88:<0.11196.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.006,ns_1@10.242.238.88:<0.11196.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:40.007,ns_1@10.242.238.88:<0.11195.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 665 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.007,ns_1@10.242.238.88:<0.11201.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 665 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.007,ns_1@10.242.238.88:<0.11202.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 665 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.011,ns_1@10.242.238.88:<0.11203.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 665 into 'ns_1@10.242.238.89' is <18124.28364.0> [ns_server:debug,2014-08-19T16:50:40.014,ns_1@10.242.238.88:<0.11203.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 665 into 'ns_1@10.242.238.90' is <18125.24548.0> [rebalance:debug,2014-08-19T16:50:40.014,ns_1@10.242.238.88:<0.11195.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 665 is <0.11203.1> [ns_server:debug,2014-08-19T16:50:40.056,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,47086}, tap_estimate, {replica_building,"default",665,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28364.0>, <<"replication_building_665_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.058,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,49531}, tap_estimate, {replica_building,"default",665,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24548.0>, <<"replication_building_665_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.058,ns_1@10.242.238.88:<0.11204.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24548.0>}, {'ns_1@10.242.238.89',<18124.28364.0>}]) [rebalance:info,2014-08-19T16:50:40.059,ns_1@10.242.238.88:<0.11195.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:40.059,ns_1@10.242.238.88:<0.11195.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 665 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.060,ns_1@10.242.238.88:<0.11195.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.060,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.063,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:40.064,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.11216.1>) 
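
[editor's note] Each move in this excerpt is bracketed by a "Noted vbucket move start (vbucket N)" record and a "noted backfill done: {move,{N,..." record. Assuming the log text is saved to a hypothetical ns_server.debug.log, the sketch below pairs those two records by vbucket id using the timestamps in the record headers and reports the elapsed time between them (on the order of 50-150 ms for the moves visible here).

    #!/usr/bin/env python3
    # Editorial sketch (not part of the log): time from "Noted vbucket move start"
    # to the matching "noted backfill done" record, per vbucket.
    # "ns_server.debug.log" is a hypothetical placeholder path.
    import re
    from datetime import datetime

    HEADER = re.compile(r"\[[a-z_]+:[a-z]+,(\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}),")
    START  = re.compile(r"Noted vbucket move start \(vbucket (\d+)\)")
    DONE   = re.compile(r"noted backfill done: \{move,\{(\d+),")

    def records(text):
        """Split the raw log into (timestamp, body) pairs, one per record header."""
        heads = list(HEADER.finditer(text))
        for i, h in enumerate(heads):
            end = heads[i + 1].start() if i + 1 < len(heads) else len(text)
            ts = datetime.strptime(h.group(1), "%Y-%m-%dT%H:%M:%S.%f")
            yield ts, text[h.end():end]

    if __name__ == "__main__":
        started = {}
        with open("ns_server.debug.log") as f:   # hypothetical path
            for ts, body in records(f.read()):
                m = START.search(body)
                if m:
                    started[int(m.group(1))] = ts
                m = DONE.search(body)
                if m and int(m.group(1)) in started:
                    vb = int(m.group(1))
                    print("vbucket %4d backfill took %.3fs"
                          % (vb, (ts - started.pop(vb)).total_seconds()))
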
[ns_server:debug,2014-08-19T16:50:40.064,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 409) [ns_server:debug,2014-08-19T16:50:40.064,ns_1@10.242.238.88:<0.11217.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.064,ns_1@10.242.238.88:<0.11217.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:40.064,ns_1@10.242.238.88:<0.11216.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 409 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.064,ns_1@10.242.238.88:<0.11222.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 409 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.064,ns_1@10.242.238.88:<0.11223.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 409 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [views:debug,2014-08-19T16:50:40.083,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/359. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:40.083,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",359,active,0} [ns_server:debug,2014-08-19T16:50:40.146,ns_1@10.242.238.88:<0.11224.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 409 into 'ns_1@10.242.238.90' is <18125.24568.0> [ns_server:debug,2014-08-19T16:50:40.149,ns_1@10.242.238.88:<0.11224.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 409 into 'ns_1@10.242.238.89' is <18124.28383.0> [rebalance:debug,2014-08-19T16:50:40.149,ns_1@10.242.238.88:<0.11216.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 409 is <0.11224.1> [ns_server:debug,2014-08-19T16:50:40.178,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,169366}, tap_estimate, {replica_building,"default",409,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24568.0>, <<"replication_building_409_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.195,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,186639}, tap_estimate, {replica_building,"default",409,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28383.0>, <<"replication_building_409_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.196,ns_1@10.242.238.88:<0.11225.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28383.0>}, {'ns_1@10.242.238.90',<18125.24568.0>}]) [rebalance:info,2014-08-19T16:50:40.196,ns_1@10.242.238.88:<0.11216.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:40.197,ns_1@10.242.238.88:<0.11216.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 409 on ns_1@10.242.238.88 
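
[editor's note] Each replica builder spawned for a move also produces a "Seeing tap_estimate" record naming the bucket, vbucket, source node, destination node and a numeric field (0 in every record shown here). A small sketch for extracting those records, again against a hypothetical ns_server.debug.log, is given below; the last column simply reproduces that numeric field without interpreting it.

    #!/usr/bin/env python3
    # Editorial sketch (not part of the log): extract replica-building
    # tap_estimate records.  "ns_server.debug.log" is a hypothetical path.
    import re

    TAP_RE = re.compile(
        r'Seeing tap_estimate:\s*\{\{(\d+),\s*(\d+),\s*(\d+)\},\s*tap_estimate,\s*'
        r'\{replica_building,"([^"]+)",(\d+),\'([^\']+)\',\s*\'([^\']+)\'\},\s*(\d+),')

    if __name__ == "__main__":
        with open("ns_server.debug.log") as f:   # hypothetical path
            text = f.read()
        for m in TAP_RE.finditer(text):
            bucket, vb  = m.group(4), int(m.group(5))
            src, dst, n = m.group(6), m.group(7), m.group(8)
            print("bucket=%s vb=%4d %s -> %s value=%s" % (bucket, vb, src, dst, n))
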
[rebalance:info,2014-08-19T16:50:40.197,ns_1@10.242.238.88:<0.11216.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.198,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:40.201,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.201,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.11251.1>) [ns_server:debug,2014-08-19T16:50:40.201,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 919) [ns_server:debug,2014-08-19T16:50:40.202,ns_1@10.242.238.88:<0.11252.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.202,ns_1@10.242.238.88:<0.11252.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:40.202,ns_1@10.242.238.88:<0.11251.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 919 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.202,ns_1@10.242.238.88:<0.11257.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 919 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.202,ns_1@10.242.238.88:<0.11258.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 919 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.206,ns_1@10.242.238.88:<0.11259.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 919 into 'ns_1@10.242.238.89' is <18124.28389.0> [ns_server:debug,2014-08-19T16:50:40.209,ns_1@10.242.238.88:<0.11259.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 919 into 'ns_1@10.242.238.91' is <18126.26921.0> [rebalance:debug,2014-08-19T16:50:40.209,ns_1@10.242.238.88:<0.11251.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 919 is <0.11259.1> [ns_server:debug,2014-08-19T16:50:40.237,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,228403}, tap_estimate, {replica_building,"default",919,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28389.0>, <<"replication_building_919_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.251,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,242225}, tap_estimate, {replica_building,"default",919,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26921.0>, <<"replication_building_919_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:40.251,ns_1@10.242.238.88:<0.11260.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26921.0>}, {'ns_1@10.242.238.89',<18124.28389.0>}]) 
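
[editor's note] The "Had backfill rvs: [true,true](...)" records show both replica builders for a vbucket answering the backfill determination; in this excerpt every answer is true. A small illustrative sketch (hypothetical file name, introduced here) that scans the log for any non-true answer:

    #!/usr/bin/env python3
    # Editorial sketch (not part of the log): flag any "Had backfill rvs" record
    # whose builders did not all report true.  "ns_server.debug.log" is a
    # hypothetical placeholder path.
    import re

    RVS_RE = re.compile(r"Had backfill rvs:\s*\[([a-z,\s]+)\]\s*\(\[(.*?)\]\)", re.S)

    if __name__ == "__main__":
        with open("ns_server.debug.log") as f:   # hypothetical path
            text = f.read()
        total = bad = 0
        for m in RVS_RE.finditer(text):
            total += 1
            flags = [v.strip() for v in m.group(1).split(",")]
            if any(v != "true" for v in flags):
                bad += 1
                print("non-true backfill rv %s for %s" % (flags, m.group(2)))
        print("%d backfill determinations, %d with a non-true result" % (total, bad))
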
[rebalance:info,2014-08-19T16:50:40.251,ns_1@10.242.238.88:<0.11251.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:40.252,ns_1@10.242.238.88:<0.11251.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 919 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.252,ns_1@10.242.238.88:<0.11251.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.253,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.257,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.257,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.11272.1>) [ns_server:debug,2014-08-19T16:50:40.258,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 357. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.258,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/357. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:40.258,ns_1@10.242.238.88:<0.11273.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.258,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",357,active,0} [ns_server:debug,2014-08-19T16:50:40.258,ns_1@10.242.238.88:<0.11273.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:40.258,ns_1@10.242.238.88:<0.11272.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 664 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.259,ns_1@10.242.238.88:<0.11278.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 664 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.259,ns_1@10.242.238.88:<0.11279.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 664 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.260,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458,330, 875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,172, 900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925, 797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222,950, 822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975, 847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689, 561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897,769, 714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794, 739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947,819,764, 636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661, 533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711,583,400, 272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425, 242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450, 322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475, 164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372, 917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836, 653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703,575, 392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600, 417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442, 314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522,467, 156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492,364, 909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011,934, 
806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,959, 831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907, 596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412,957, 646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696,385, 1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746,435, 124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485, 174,774,719,408,953,642,876,565,510,799,744,433,122] [ns_server:debug,2014-08-19T16:50:40.261,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 664) [ns_server:debug,2014-08-19T16:50:40.263,ns_1@10.242.238.88:<0.11280.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 664 into 'ns_1@10.242.238.89' is <18124.28394.0> [ns_server:debug,2014-08-19T16:50:40.265,ns_1@10.242.238.88:<0.11280.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 664 into 'ns_1@10.242.238.90' is <18125.24587.0> [rebalance:debug,2014-08-19T16:50:40.265,ns_1@10.242.238.88:<0.11272.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 664 is <0.11280.1> [ns_server:debug,2014-08-19T16:50:40.295,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,286547}, tap_estimate, {replica_building,"default",664,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28394.0>, <<"replication_building_664_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.309,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,300197}, tap_estimate, {replica_building,"default",664,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24587.0>, <<"replication_building_664_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.309,ns_1@10.242.238.88:<0.11281.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24587.0>}, {'ns_1@10.242.238.89',<18124.28394.0>}]) [rebalance:info,2014-08-19T16:50:40.309,ns_1@10.242.238.88:<0.11272.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:40.310,ns_1@10.242.238.88:<0.11272.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 664 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.310,ns_1@10.242.238.88:<0.11272.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.311,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.314,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:40.314,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.11293.1>) 
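
[editor's note] Every move also issues a "Doing bulk vbucket N state change [...]" record listing, for each destination node, the requested vbucket state (replica in this excerpt) plus two further tuple fields that are reproduced here without interpretation. A sketch that tabulates those per-node requests from the log text (placeholder file name, names introduced here):

    #!/usr/bin/env python3
    # Editorial sketch (not part of the log): tabulate the per-node requests in
    # "Doing bulk vbucket N state change [...]" records.
    # "ns_server.debug.log" is a hypothetical placeholder path.
    import re

    BULK_RE  = re.compile(r"Doing bulk vbucket (\d+) state change\s*\[([^\]]*)\]")
    ENTRY_RE = re.compile(r"\{'([^']+)',(\w+),(\w+),(\w+)\}")

    if __name__ == "__main__":
        with open("ns_server.debug.log") as f:   # hypothetical path
            text = f.read()
        for m in BULK_RE.finditer(text):
            vb = int(m.group(1))
            for node, state, a, b in ENTRY_RE.findall(m.group(2)):
                # a and b are the third and fourth tuple fields, left as-is
                print("vb %4d: %-25s -> %s (%s, %s)" % (vb, node, state, a, b))
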
[ns_server:debug,2014-08-19T16:50:40.315,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 408) [ns_server:debug,2014-08-19T16:50:40.315,ns_1@10.242.238.88:<0.11294.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.315,ns_1@10.242.238.88:<0.11294.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:40.315,ns_1@10.242.238.88:<0.11293.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 408 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.316,ns_1@10.242.238.88:<0.11299.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 408 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.316,ns_1@10.242.238.88:<0.11300.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 408 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.319,ns_1@10.242.238.88:<0.11301.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 408 into 'ns_1@10.242.238.90' is <18125.24593.0> [ns_server:debug,2014-08-19T16:50:40.322,ns_1@10.242.238.88:<0.11301.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 408 into 'ns_1@10.242.238.89' is <18124.28399.0> [rebalance:debug,2014-08-19T16:50:40.322,ns_1@10.242.238.88:<0.11293.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 408 is <0.11301.1> [views:debug,2014-08-19T16:50:40.342,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/357. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:40.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",357,active,0} [ns_server:debug,2014-08-19T16:50:40.350,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,341803}, tap_estimate, {replica_building,"default",408,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24593.0>, <<"replication_building_408_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.364,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,355539}, tap_estimate, {replica_building,"default",408,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28399.0>, <<"replication_building_408_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.365,ns_1@10.242.238.88:<0.11302.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28399.0>}, {'ns_1@10.242.238.90',<18125.24593.0>}]) [rebalance:info,2014-08-19T16:50:40.365,ns_1@10.242.238.88:<0.11293.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:40.365,ns_1@10.242.238.88:<0.11293.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 408 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.366,ns_1@10.242.238.88:<0.11293.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.366,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:40.370,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.370,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.11314.1>) [ns_server:debug,2014-08-19T16:50:40.370,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 918) [ns_server:debug,2014-08-19T16:50:40.371,ns_1@10.242.238.88:<0.11315.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.371,ns_1@10.242.238.88:<0.11315.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:40.371,ns_1@10.242.238.88:<0.11314.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 918 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.371,ns_1@10.242.238.88:<0.11320.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 918 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.371,ns_1@10.242.238.88:<0.11321.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 918 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.375,ns_1@10.242.238.88:<0.11322.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 918 into 'ns_1@10.242.238.89' is <18124.28419.0> [ns_server:debug,2014-08-19T16:50:40.377,ns_1@10.242.238.88:<0.11322.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 918 into 'ns_1@10.242.238.91' is <18126.26941.0> [rebalance:debug,2014-08-19T16:50:40.377,ns_1@10.242.238.88:<0.11314.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 918 is <0.11322.1> [ns_server:debug,2014-08-19T16:50:40.406,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,397101}, tap_estimate, {replica_building,"default",918,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28419.0>, <<"replication_building_918_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.419,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,410766}, tap_estimate, {replica_building,"default",918,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26941.0>, <<"replication_building_918_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:40.420,ns_1@10.242.238.88:<0.11323.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26941.0>}, {'ns_1@10.242.238.89',<18124.28419.0>}]) [rebalance:info,2014-08-19T16:50:40.420,ns_1@10.242.238.88:<0.11314.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:40.421,ns_1@10.242.238.88:<0.11314.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 918 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.421,ns_1@10.242.238.88:<0.11314.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.422,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.425,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.425,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.11335.1>) 
[ns_server:debug,2014-08-19T16:50:40.425,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 663) [ns_server:debug,2014-08-19T16:50:40.425,ns_1@10.242.238.88:<0.11336.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.426,ns_1@10.242.238.88:<0.11336.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:40.426,ns_1@10.242.238.88:<0.11335.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 663 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.426,ns_1@10.242.238.88:<0.11341.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 663 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.426,ns_1@10.242.238.88:<0.11342.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 663 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.430,ns_1@10.242.238.88:<0.11343.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 663 into 'ns_1@10.242.238.89' is <18124.28424.0> [ns_server:debug,2014-08-19T16:50:40.432,ns_1@10.242.238.88:<0.11343.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 663 into 'ns_1@10.242.238.90' is <18125.24612.0> [rebalance:debug,2014-08-19T16:50:40.432,ns_1@10.242.238.88:<0.11335.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 663 is <0.11343.1> [ns_server:debug,2014-08-19T16:50:40.461,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,452831}, tap_estimate, {replica_building,"default",663,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28424.0>, <<"replication_building_663_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.474,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,465926}, tap_estimate, {replica_building,"default",663,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24612.0>, <<"replication_building_663_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.475,ns_1@10.242.238.88:<0.11352.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24612.0>}, {'ns_1@10.242.238.89',<18124.28424.0>}]) [rebalance:info,2014-08-19T16:50:40.475,ns_1@10.242.238.88:<0.11335.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:40.476,ns_1@10.242.238.88:<0.11335.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 663 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.476,ns_1@10.242.238.88:<0.11335.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.477,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:50:40.480,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:40.480,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.11370.1>) [ns_server:debug,2014-08-19T16:50:40.480,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 407) [ns_server:debug,2014-08-19T16:50:40.481,ns_1@10.242.238.88:<0.11371.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.481,ns_1@10.242.238.88:<0.11371.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:40.481,ns_1@10.242.238.88:<0.11370.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 407 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.481,ns_1@10.242.238.88:<0.11376.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 407 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.481,ns_1@10.242.238.88:<0.11377.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 407 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.485,ns_1@10.242.238.88:<0.11378.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 407 into 'ns_1@10.242.238.90' is <18125.24632.0> [ns_server:debug,2014-08-19T16:50:40.487,ns_1@10.242.238.88:<0.11378.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 407 into 'ns_1@10.242.238.89' is <18124.28429.0> [rebalance:debug,2014-08-19T16:50:40.487,ns_1@10.242.238.88:<0.11370.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 407 is <0.11378.1> [ns_server:debug,2014-08-19T16:50:40.515,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,506830}, tap_estimate, {replica_building,"default",407,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24632.0>, <<"replication_building_407_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.517,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 355. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.517,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/355. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:40.517,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",355,active,0} [ns_server:debug,2014-08-19T16:50:40.519,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458,330, 875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355, 172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002, 925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,170,898,770,715,587,404,276,949,821,766,638,455,144,872, 689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897, 769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947,819, 764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844, 661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686, 558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522, 467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492, 364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011, 934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414,286, 959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362, 907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412, 957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696, 385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746, 435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540, 
485,174,774,719,408,953,642,876,565,510,799,744,433,122] [ns_server:debug,2014-08-19T16:50:40.530,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,521185}, tap_estimate, {replica_building,"default",407,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28429.0>, <<"replication_building_407_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.530,ns_1@10.242.238.88:<0.11379.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28429.0>}, {'ns_1@10.242.238.90',<18125.24632.0>}]) [rebalance:info,2014-08-19T16:50:40.530,ns_1@10.242.238.88:<0.11370.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:40.531,ns_1@10.242.238.88:<0.11370.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 407 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.532,ns_1@10.242.238.88:<0.11370.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.532,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:40.535,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.535,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.11391.1>) [ns_server:debug,2014-08-19T16:50:40.536,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 917) [ns_server:debug,2014-08-19T16:50:40.536,ns_1@10.242.238.88:<0.11392.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.536,ns_1@10.242.238.88:<0.11392.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:40.536,ns_1@10.242.238.88:<0.11391.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 917 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.537,ns_1@10.242.238.88:<0.11397.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 917 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.537,ns_1@10.242.238.88:<0.11398.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 917 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.540,ns_1@10.242.238.88:<0.11399.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 917 into 'ns_1@10.242.238.89' is <18124.28435.0> [ns_server:debug,2014-08-19T16:50:40.543,ns_1@10.242.238.88:<0.11399.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 917 into 'ns_1@10.242.238.91' is <18126.26967.0> [rebalance:debug,2014-08-19T16:50:40.543,ns_1@10.242.238.88:<0.11391.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 917 is <0.11399.1> [ns_server:debug,2014-08-19T16:50:40.572,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,563417}, tap_estimate, {replica_building,"default",917,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28435.0>, <<"replication_building_917_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.586,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,577063}, tap_estimate, {replica_building,"default",917,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26967.0>, <<"replication_building_917_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:40.586,ns_1@10.242.238.88:<0.11400.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26967.0>}, {'ns_1@10.242.238.89',<18124.28435.0>}]) [rebalance:info,2014-08-19T16:50:40.586,ns_1@10.242.238.88:<0.11391.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:40.587,ns_1@10.242.238.88:<0.11391.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 917 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.587,ns_1@10.242.238.88:<0.11391.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.588,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.591,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.591,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.11412.1>) 
[ns_server:debug,2014-08-19T16:50:40.591,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 662) [ns_server:debug,2014-08-19T16:50:40.592,ns_1@10.242.238.88:<0.11413.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.592,ns_1@10.242.238.88:<0.11413.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:40.592,ns_1@10.242.238.88:<0.11412.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 662 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.592,ns_1@10.242.238.88:<0.11418.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 662 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.592,ns_1@10.242.238.88:<0.11419.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 662 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:50:40.592,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/355. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:40.593,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",355,active,0} [ns_server:debug,2014-08-19T16:50:40.596,ns_1@10.242.238.88:<0.11420.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 662 into 'ns_1@10.242.238.89' is <18124.28454.0> [ns_server:debug,2014-08-19T16:50:40.599,ns_1@10.242.238.88:<0.11420.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 662 into 'ns_1@10.242.238.90' is <18125.24651.0> [rebalance:debug,2014-08-19T16:50:40.599,ns_1@10.242.238.88:<0.11412.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 662 is <0.11420.1> [ns_server:debug,2014-08-19T16:50:40.629,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,620273}, tap_estimate, {replica_building,"default",662,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28454.0>, <<"replication_building_662_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.642,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,633065}, tap_estimate, {replica_building,"default",662,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24651.0>, <<"replication_building_662_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.642,ns_1@10.242.238.88:<0.11421.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.24651.0>}, {'ns_1@10.242.238.89',<18124.28454.0>}]) [rebalance:info,2014-08-19T16:50:40.642,ns_1@10.242.238.88:<0.11412.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:40.643,ns_1@10.242.238.88:<0.11412.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 662 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:50:40.643,ns_1@10.242.238.88:<0.11412.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.644,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.647,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:40.647,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.11433.1>) [ns_server:debug,2014-08-19T16:50:40.647,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 406) [ns_server:debug,2014-08-19T16:50:40.648,ns_1@10.242.238.88:<0.11434.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.648,ns_1@10.242.238.88:<0.11434.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:40.648,ns_1@10.242.238.88:<0.11433.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 406 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.648,ns_1@10.242.238.88:<0.11439.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 406 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.648,ns_1@10.242.238.88:<0.11440.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 406 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.652,ns_1@10.242.238.88:<0.11441.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 406 into 'ns_1@10.242.238.90' is <18125.24657.0> [ns_server:debug,2014-08-19T16:50:40.654,ns_1@10.242.238.88:<0.11441.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 406 into 'ns_1@10.242.238.89' is <18124.28459.0> [rebalance:debug,2014-08-19T16:50:40.654,ns_1@10.242.238.88:<0.11433.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 406 is <0.11441.1> [ns_server:debug,2014-08-19T16:50:40.683,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,674429}, tap_estimate, {replica_building,"default",406,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.24657.0>, <<"replication_building_406_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:40.696,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,687691}, tap_estimate, {replica_building,"default",406,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28459.0>, <<"replication_building_406_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.697,ns_1@10.242.238.88:<0.11442.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.28459.0>}, {'ns_1@10.242.238.90',<18125.24657.0>}]) 
[rebalance:info,2014-08-19T16:50:40.697,ns_1@10.242.238.88:<0.11433.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:40.698,ns_1@10.242.238.88:<0.11433.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 406 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.698,ns_1@10.242.238.88:<0.11433.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.699,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:40.702,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:40.702,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.11468.1>) [ns_server:debug,2014-08-19T16:50:40.702,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 916) [ns_server:debug,2014-08-19T16:50:40.702,ns_1@10.242.238.88:<0.11469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:40.703,ns_1@10.242.238.88:<0.11469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:40.703,ns_1@10.242.238.88:<0.11468.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 916 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:40.703,ns_1@10.242.238.88:<0.11474.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 916 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:40.703,ns_1@10.242.238.88:<0.11475.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 916 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:40.707,ns_1@10.242.238.88:<0.11476.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 916 into 'ns_1@10.242.238.89' is <18124.28465.0> [ns_server:debug,2014-08-19T16:50:40.709,ns_1@10.242.238.88:<0.11476.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 916 into 'ns_1@10.242.238.91' is <18126.26989.0> [rebalance:debug,2014-08-19T16:50:40.709,ns_1@10.242.238.88:<0.11468.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 916 is <0.11476.1> [ns_server:debug,2014-08-19T16:50:40.737,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,728852}, tap_estimate, {replica_building,"default",916,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.28465.0>, <<"replication_building_916_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:40.754,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452640,745372}, tap_estimate, {replica_building,"default",916,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.26989.0>, <<"replication_building_916_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:40.754,ns_1@10.242.238.88:<0.11477.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.26989.0>}, {'ns_1@10.242.238.89',<18124.28465.0>}]) [rebalance:info,2014-08-19T16:50:40.755,ns_1@10.242.238.88:<0.11468.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:40.755,ns_1@10.242.238.88:<0.11468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 916 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:40.756,ns_1@10.242.238.88:<0.11468.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:40.756,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:40.757,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:40.768,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 353. Nacking mccouch update. [views:debug,2014-08-19T16:50:40.768,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/353. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:40.768,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",353,active,0} [ns_server:debug,2014-08-19T16:50:40.770,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458,330, 875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355, 172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002, 925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,144, 872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352, 897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,973,845,662,534,479,168,896,768,713,585,402,274,947, 819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972, 844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869, 686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894,711, 583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736, 608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761, 633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658, 530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547, 492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206, 1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414, 286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673, 362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723, 412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462, 696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801, 746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851, 
540,485,174,774,719,408,953,642,876,565,510,799,744,433,122] [views:debug,2014-08-19T16:50:40.852,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/353. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:40.852,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",353,active,0} [ns_server:debug,2014-08-19T16:50:41.027,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 351. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.027,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/351. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.027,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",351,active,0} [ns_server:debug,2014-08-19T16:50:41.029,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,978,850,667,539,484,356,901,773,718,590,407,224,952,824,641,513,458,330, 875,692,564,509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355, 172,900,772,717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002, 925,797,742,614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,144, 872,689,561,506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352, 897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274, 947,819,764,636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116, 972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997, 869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,166,894, 711,583,400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791, 736,608,425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816, 761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841, 658,530,475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683, 555,500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580, 397,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733, 605,422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 369,186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266, 1016,939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158, 886,703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911, 
783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833, 650,522,467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984, 673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778, 723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517, 462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256, 801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306, 851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122] [views:debug,2014-08-19T16:50:41.100,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/351. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.100,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",351,active,0} [ns_server:debug,2014-08-19T16:50:41.183,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 349. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.183,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/349. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.183,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",349,active,0} [ns_server:debug,2014-08-19T16:50:41.185,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564, 509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772, 717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742, 614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767, 639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664, 536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714, 586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764, 636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661, 533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500, 
372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522, 467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492, 364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011, 934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414,286, 959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673,362, 907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723,412, 957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462,696, 385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801,746, 435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851,540, 485,174,774,719,408,953,642,876,565,510,799,744,433,122,978,667,356] [views:debug,2014-08-19T16:50:41.217,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/349. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",349,active,0} [ns_server:debug,2014-08-19T16:50:41.292,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 347. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.292,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/347. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.293,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",347,active,0} [ns_server:debug,2014-08-19T16:50:41.294,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564, 509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772, 717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742, 614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767, 639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664, 536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714, 586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764, 636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661, 533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,347,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605, 422,294,967,839,656,528,473,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369, 186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547, 492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206, 1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414, 286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984,673, 362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723, 412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517,462, 696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256,801, 746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306,851, 
540,485,174,774,719,408,953,642,876,565,510,799,744,433,122,978,667,356] [views:debug,2014-08-19T16:50:41.326,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/347. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.327,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",347,active,0} [ns_server:debug,2014-08-19T16:50:41.402,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 345. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.402,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/345. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.402,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",345,active,0} [ns_server:debug,2014-08-19T16:50:41.404,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564, 509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772, 717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742, 614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767, 639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664, 536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714, 586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764, 636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661, 533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,347,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605, 422,294,967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 369,186,914,786,731,603,420,292,965,837,654,526,471,160,888,705,577,394,266, 1016,939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,158, 
886,703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911, 783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833, 650,522,467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984, 673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778, 723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517, 462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256, 801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306, 851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122,978,667,356] [views:debug,2014-08-19T16:50:41.435,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/345. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.435,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",345,active,0} [ns_server:debug,2014-08-19T16:50:41.511,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 343. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.511,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/343. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.511,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",343,active,0} [ns_server:debug,2014-08-19T16:50:41.513,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564, 509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772, 717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742, 614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767, 639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664, 536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714, 586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764, 636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661, 533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 
450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,347,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605, 422,294,967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 369,186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394, 266,1016,939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419, 236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 158,886,703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366, 911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858, 675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572, 389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725, 597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128, 984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178, 778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828, 517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567, 256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617, 306,851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122,978,667, 356] [views:debug,2014-08-19T16:50:41.545,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/343. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.545,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",343,active,0} [ns_server:debug,2014-08-19T16:50:41.685,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 341. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.686,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/341. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.686,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",341,active,0} [ns_server:debug,2014-08-19T16:50:41.688,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,901,773,718,590,407,224,952,824,641,513,458,330,875,692,564, 509,381,198,1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772, 717,589,406,278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742, 614,431,248,120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767, 639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664, 536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561, 506,378,1000,923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714, 586,403,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764, 636,453,142,998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661, 533,478,350,895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583, 400,272,1022,945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608, 425,242,114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530, 475,347,164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555, 500,372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397, 214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605, 422,294,967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630, 447,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 369,186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394, 266,1016,939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419, 236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494, 366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,156,884,701,573,390,262,1012,935,807,752,624,441,130,986, 858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700, 572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780, 725,597,414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439, 128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489, 178,778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228, 828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878, 567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928, 
617,306,851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122,978, 667,356] [views:debug,2014-08-19T16:50:41.761,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/341. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.762,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",341,active,0} [ns_server:debug,2014-08-19T16:50:41.936,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 339. Nacking mccouch update. [views:debug,2014-08-19T16:50:41.937,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/339. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:41.937,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",339,active,0} [ns_server:debug,2014-08-19T16:50:41.939,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,824,641,513,458,330,875,692,564,509,381,198, 1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406, 278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248, 120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328, 873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220, 948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114, 970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164, 892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 
703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,154,882,699,571,388,260,1010,805,750,439,128,984, 673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778, 723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517, 462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567,256, 801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617,306, 851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122,978,667,356, 901,590,224] [views:debug,2014-08-19T16:50:42.020,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/339. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.021,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",339,active,0} [ns_server:debug,2014-08-19T16:50:42.196,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 337. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.196,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/337. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.196,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",337,active,0} [ns_server:debug,2014-08-19T16:50:42.198,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,824,641,513,458,330,875,692,564,509,381,198, 1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406, 278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248, 120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328, 873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220, 948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114, 970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995, 
867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164, 892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128, 984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178, 778,723,412,957,646,880,569,258,803,748,437,126,982,671,360,905,594,228,828, 517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878,567, 256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617, 306,851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122,978,667, 356,901,590,224] [views:debug,2014-08-19T16:50:42.279,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/337. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",337,active,0} [ns_server:debug,2014-08-19T16:50:42.455,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 335. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.455,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/335. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.455,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",335,active,0} [ns_server:debug,2014-08-19T16:50:42.457,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,824,641,513,458,330,875,692,564,509,381,198, 1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406, 278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248, 120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328, 873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220, 948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114, 970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164, 892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128, 984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178, 778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594,228, 828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,878, 567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928, 
617,306,851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122,978, 667,356,901,590,224] [views:debug,2014-08-19T16:50:42.522,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/335. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.522,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",335,active,0} [ns_server:debug,2014-08-19T16:50:42.682,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 333. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.682,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/333. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.682,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",333,active,0} [ns_server:debug,2014-08-19T16:50:42.684,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,824,641,513,458,330,875,692,564,509,381,198, 1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406, 278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248, 120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328, 873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220, 948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114, 970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164, 892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 
703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128, 984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178, 778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594,228, 828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,333, 878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005, 928,617,306,851,540,485,174,774,719,408,953,642,876,565,510,799,744,433,122, 978,667,356,901,590,224] [views:debug,2014-08-19T16:50:42.745,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/333. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.745,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",333,active,0} [ns_server:debug,2014-08-19T16:50:42.820,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 331. Nacking mccouch update. [views:debug,2014-08-19T16:50:42.821,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/331. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.821,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",331,active,0} [ns_server:debug,2014-08-19T16:50:42.823,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,824,641,513,458,330,875,692,564,509,381,198, 1003,926,798,743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406, 278,951,823,640,512,457,146,874,691,563,508,380,1002,925,797,742,614,431,248, 120,976,848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328, 873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000, 923,795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220, 948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114, 970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995, 
867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164, 892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128, 984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178, 778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594,228, 828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,333, 878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005, 928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433, 122,978,667,356,901,590,224] [views:debug,2014-08-19T16:50:42.854,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/331. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:42.855,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",331,active,0} [ns_server:debug,2014-08-19T16:50:42.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_916_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_916_'ns_1@10.242.238.89'">>}]}, {move_state,406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_406_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_406_'ns_1@10.242.238.90'">>}]}, {move_state,662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_662_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_662_'ns_1@10.242.238.89'">>}]}, {move_state,917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_917_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_917_'ns_1@10.242.238.89'">>}]}, {move_state,407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_407_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_407_'ns_1@10.242.238.90'">>}]}, {move_state,663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_663_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_663_'ns_1@10.242.238.89'">>}]}, {move_state,918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_918_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_918_'ns_1@10.242.238.89'">>}]}, {move_state,408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_408_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_408_'ns_1@10.242.238.90'">>}]}, {move_state,664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_664_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_664_'ns_1@10.242.238.89'">>}]}, {move_state,919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_919_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_919_'ns_1@10.242.238.89'">>}]}, {move_state,409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_409_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_409_'ns_1@10.242.238.90'">>}]}, {move_state,665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_665_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_665_'ns_1@10.242.238.89'">>}]}, {move_state,920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_920_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_920_'ns_1@10.242.238.89'">>}]}, {move_state,410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_410_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_410_'ns_1@10.242.238.90'">>}]}, {move_state,666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_666_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_666_'ns_1@10.242.238.89'">>}]}, {move_state,921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_921_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_921_'ns_1@10.242.238.89'">>}]}, {move_state,411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_411_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_411_'ns_1@10.242.238.90'">>}]}, {move_state,667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_667_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_667_'ns_1@10.242.238.89'">>}]}, {move_state,922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_922_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_922_'ns_1@10.242.238.89'">>}]}, {move_state,412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_412_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_412_'ns_1@10.242.238.90'">>}]}, {move_state,668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_668_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_668_'ns_1@10.242.238.89'">>}]}, {move_state,923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_923_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_923_'ns_1@10.242.238.89'">>}]}, {move_state,413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_413_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_413_'ns_1@10.242.238.90'">>}]}, {move_state,669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_669_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_669_'ns_1@10.242.238.89'">>}]}, {move_state,924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_924_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_924_'ns_1@10.242.238.89'">>}]}, {move_state,414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_414_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_414_'ns_1@10.242.238.90'">>}]}, {move_state,670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_670_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_670_'ns_1@10.242.238.89'">>}]}, {move_state,925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_925_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_925_'ns_1@10.242.238.89'">>}]}, {move_state,415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_415_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_415_'ns_1@10.242.238.90'">>}]}, {move_state,671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_671_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_671_'ns_1@10.242.238.89'">>}]}, {move_state,926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_926_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_926_'ns_1@10.242.238.89'">>}]}, {move_state,416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_416_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_416_'ns_1@10.242.238.90'">>}]}, {move_state,672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_672_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_672_'ns_1@10.242.238.89'">>}]}, {move_state,927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_927_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_927_'ns_1@10.242.238.89'">>}]}, {move_state,417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_417_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_417_'ns_1@10.242.238.90'">>}]}, {move_state,673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_673_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_673_'ns_1@10.242.238.89'">>}]}, {move_state,928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_928_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_928_'ns_1@10.242.238.89'">>}]}, {move_state,418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_418_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_418_'ns_1@10.242.238.90'">>}]}, {move_state,674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_674_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_674_'ns_1@10.242.238.89'">>}]}, {move_state,929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_929_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_929_'ns_1@10.242.238.89'">>}]}, {move_state,419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_419_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_419_'ns_1@10.242.238.90'">>}]}, {move_state,675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_675_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_675_'ns_1@10.242.238.89'">>}]}, {move_state,930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_930_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_930_'ns_1@10.242.238.89'">>}]}, {move_state,420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_420_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_420_'ns_1@10.242.238.90'">>}]}, {move_state,676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_676_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_676_'ns_1@10.242.238.89'">>}]}, {move_state,931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_931_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_931_'ns_1@10.242.238.89'">>}]}, {move_state,421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_421_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_421_'ns_1@10.242.238.90'">>}]}, {move_state,677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_677_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_677_'ns_1@10.242.238.89'">>}]}, {move_state,932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_932_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_932_'ns_1@10.242.238.89'">>}]}, {move_state,422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_422_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_422_'ns_1@10.242.238.90'">>}]}, {move_state,678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_678_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_678_'ns_1@10.242.238.89'">>}]}, {move_state,933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_933_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_933_'ns_1@10.242.238.89'">>}]}, {move_state,423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_423_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_423_'ns_1@10.242.238.90'">>}]}, {move_state,679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_679_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_679_'ns_1@10.242.238.89'">>}]}, {move_state,934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_934_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_934_'ns_1@10.242.238.89'">>}]}, {move_state,424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_424_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_424_'ns_1@10.242.238.90'">>}]}, {move_state,680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_680_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_680_'ns_1@10.242.238.89'">>}]}, {move_state,935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_935_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_935_'ns_1@10.242.238.89'">>}]}, {move_state,425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_425_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_425_'ns_1@10.242.238.90'">>}]}, {move_state,681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_681_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_681_'ns_1@10.242.238.89'">>}]}, {move_state,936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_936_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_936_'ns_1@10.242.238.89'">>}]}, {move_state,426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_426_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_426_'ns_1@10.242.238.90'">>}]}, {move_state,682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_682_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_682_'ns_1@10.242.238.91'">>}]}, {move_state,937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_937_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_937_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:50:42.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 916, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 406, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 662, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 917, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 407, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 663, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:42.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 918, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 408, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 664, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 919, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 409, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 665, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 920, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 410, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 666, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 921, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 411, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 667, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 922, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 412, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 668, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 923, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 413, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 669, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 924, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:42.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 414, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 670, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 925, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 415, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 671, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 926, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 416, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 672, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 927, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 417, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 673, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 928, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 418, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 674, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 929, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 419, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 675, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 930, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 420, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:50:42.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 676, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 931, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 421, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 677, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 932, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 422, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 678, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 933, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 423, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 679, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 934, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 424, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 680, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 935, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 425, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 681, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 936, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:42.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 426, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:42.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 682, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.91',0}] 
[ns_server:debug,2014-08-19T16:50:42.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 937, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:43.029,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 329. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.030,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/329. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.030,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",329,active,0} [ns_server:debug,2014-08-19T16:50:43.032,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,692,564,509,381,198,1003,926,798, 743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823, 640,512,457,329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898, 770,715,587,404,276,949,821,766,638,455,144,872,689,561,506,378,1000,923,795, 740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845, 662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,142,998,870, 687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712, 584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792, 737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817, 762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842, 659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684, 556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709, 581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917,789,734, 606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992,864,681, 553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889,706,578, 395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836,653, 525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886,703,575, 392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728,600, 417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625,442, 314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522,467, 339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547,492, 
364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206,1011, 934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414,286, 959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128,984,673, 362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778,723, 412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594,228,828,517, 462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,333,878,567, 256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928,617, 306,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978, 667,356,901,590,224,824,513,458] [views:debug,2014-08-19T16:50:43.113,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/329. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.114,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",329,active,0} [ns_server:debug,2014-08-19T16:50:43.289,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 327. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.289,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/327. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.289,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",327,active,0} [ns_server:debug,2014-08-19T16:50:43.291,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,692,564,509,381,198,1003,926,798, 743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823, 640,512,457,329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898, 770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923, 795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973, 845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,142,998, 870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895, 712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920, 792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945, 817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970, 842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892, 709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917,789, 734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 
656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992,864, 681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889,706, 578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811, 756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964,836, 653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886,703, 575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783,728, 600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753,625, 442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650,522, 467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675,547, 492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389,206, 1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597,414, 286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128,984, 673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178,778, 723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594,228,828, 517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,333,878, 567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005,928, 617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122, 978,667,356,901,590,224,824,513,458] [views:debug,2014-08-19T16:50:43.324,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/327. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.324,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",327,active,0} [ns_server:debug,2014-08-19T16:50:43.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 325. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.474,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/325. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",325,active,0} [ns_server:debug,2014-08-19T16:50:43.476,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,692,564,509,381,198,1003,926,798, 743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823, 640,512,457,329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898, 770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923, 795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973, 845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,140,996,868,685,557,502,374,919,791,736,608,425,242,114, 970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164, 892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128, 984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178, 778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594,228, 828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,333, 878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005, 
928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433, 122,978,667,356,901,590,224,824,513,458] [views:debug,2014-08-19T16:50:43.508,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/325. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.508,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",325,active,0} [ns_server:debug,2014-08-19T16:50:43.607,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 323. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.607,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/323. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.608,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",323,active,0} [ns_server:debug,2014-08-19T16:50:43.609,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,692,564,509,381,198,1003,926,798, 743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823, 640,512,457,329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898, 770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923, 795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973, 845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242, 114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347, 164,892,709,581,398,270,1020,943,815,760,632,449,138,994,866,683,555,500,372, 917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016, 939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 
886,703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911, 783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833, 650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858, 675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572, 389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725, 597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439, 128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489, 178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594, 228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644, 333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383, 1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744, 433,122,978,667,356,901,590,224,824,513,458] [views:debug,2014-08-19T16:50:43.641,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/323. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.641,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",323,active,0} [ns_server:debug,2014-08-19T16:50:43.716,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 321. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.716,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/321. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.716,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",321,active,0} [ns_server:debug,2014-08-19T16:50:43.718,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,692,564,509,381,198,1003,926,798, 743,615,432,304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823, 640,512,457,329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976, 848,665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690, 562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898, 770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923, 795,740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973, 845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242, 114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322, 
995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347, 164,892,709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369, 186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266, 1016,939,811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366, 911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986, 858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700, 572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780, 725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750, 439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544, 489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905, 594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955, 644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694, 383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799, 744,433,122,978,667,356,901,590,224,824,513,458] [views:debug,2014-08-19T16:50:43.750,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/321. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.751,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",321,active,0} [ns_server:debug,2014-08-19T16:50:43.825,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 319. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.825,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/319. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.826,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",319,active,0} [ns_server:debug,2014-08-19T16:50:43.827,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,564,509,198,926,798,743,615,432, 304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512,457, 329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537, 482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587, 404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637, 454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534, 479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842, 659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684, 556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709, 581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789, 734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911,783, 728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808,753, 625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833,650, 522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858,675, 547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572,389, 206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725,597, 414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439,128, 984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489,178, 778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594,228, 828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644,333, 878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383,1005, 
928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433, 122,978,667,356,901,590,224,824,513,458,692,381,1003] [views:debug,2014-08-19T16:50:43.859,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/319. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.860,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",319,active,0} [ns_server:debug,2014-08-19T16:50:43.934,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 317. Nacking mccouch update. [views:debug,2014-08-19T16:50:43.935,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/317. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.935,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",317,active,0} [ns_server:debug,2014-08-19T16:50:43.937,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,564,509,198,926,798,743,615,432, 304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512,457, 329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537, 482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587, 404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637, 454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534, 479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842, 659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684, 556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709, 581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789, 734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 
886,703,575,392,264,1014,937,809,754,626,443,132,988,860,677,549,494,366,911, 783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833, 650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986,858, 675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700,572, 389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780,725, 597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750,439, 128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544,489, 178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905,594, 228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955,644, 333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694,383, 1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744, 433,122,978,667,356,901,590,224,824,513,458,692,381,1003] [views:debug,2014-08-19T16:50:43.968,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/317. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:43.969,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",317,active,0} [ns_server:debug,2014-08-19T16:50:44.075,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 315. Nacking mccouch update. [views:debug,2014-08-19T16:50:44.075,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/315. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.075,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",315,active,0} [ns_server:debug,2014-08-19T16:50:44.077,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,564,509,198,926,798,743,615,432, 304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512,457, 329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537, 482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587, 404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637, 454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534, 479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842, 659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684, 
556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709, 581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789, 734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,130,986, 858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700, 572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780, 725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750, 439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544, 489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905, 594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955, 644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694, 383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799, 744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003] [views:debug,2014-08-19T16:50:44.159,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/315. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.159,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",315,active,0} [ns_server:debug,2014-08-19T16:50:44.259,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 313. Nacking mccouch update. [views:debug,2014-08-19T16:50:44.259,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/313. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.259,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",313,active,0} [ns_server:debug,2014-08-19T16:50:44.261,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983,672, 361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777,722, 411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516,461, 150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566,511, 200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927,616, 250,850,539,484,773,718,407,952,641,330,875,564,509,198,926,798,743,615,432, 304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512,457, 329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537, 482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587, 404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637, 454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534, 479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842, 659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684, 556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709, 581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789, 734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130, 986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883, 700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908, 780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855, 544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360, 905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410, 955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460, 
694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510, 799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003] [views:debug,2014-08-19T16:50:44.326,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/313. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.326,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",313,active,0} [ns_server:debug,2014-08-19T16:50:44.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 311. Nacking mccouch update. [views:debug,2014-08-19T16:50:44.418,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/311. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.418,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",311,active,0} [ns_server:debug,2014-08-19T16:50:44.420,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,254,854,543,488,777, 722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827,516, 461,150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877,566, 511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004,927, 616,250,850,539,484,773,718,407,952,641,330,875,564,509,198,926,798,743,615, 432,304,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512, 457,329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665, 537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507, 379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765, 637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662, 534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870, 687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712, 584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792, 737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970, 842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892, 709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016, 939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 
158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338, 883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180, 908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010, 805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310, 855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671, 360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721, 410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515, 460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565, 510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003] [views:debug,2014-08-19T16:50:44.468,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/311. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.469,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",311,active,0} [ns_server:debug,2014-08-19T16:50:44.544,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 309. Nacking mccouch update. [views:debug,2014-08-19T16:50:44.544,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/309. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.544,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",309,active,0} [ns_server:debug,2014-08-19T16:50:44.546,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,252,852,541,486,775,720,409,954,643,332,877, 566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382,1004, 927,616,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798,743,432, 977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512,457,329, 146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537,482, 354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379,196, 1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587,404, 276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429, 246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454, 326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479, 351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559, 504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584,401, 218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609, 426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842,659, 
531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556, 501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581, 398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734, 606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889,706, 578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811, 756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833, 650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130,986, 858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883,700, 572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908,780, 725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805,750, 439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855,544, 489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360,905, 594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410,955, 644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460,694, 383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510,799, 744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003,926,615,304] [views:debug,2014-08-19T16:50:44.594,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/309. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.595,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",309,active,0} [ns_server:debug,2014-08-19T16:50:44.686,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 307. Nacking mccouch update. [views:debug,2014-08-19T16:50:44.686,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/307. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.686,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",307,active,0} [ns_server:debug,2014-08-19T16:50:44.688,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798,743, 432,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512,457, 329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665,537, 482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587, 404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637, 454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534, 479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842, 659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684, 556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709, 581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789, 734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130, 986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883, 700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908, 780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855, 544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360, 905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410, 955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460, 
694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510, 799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003,926,615,304] [views:debug,2014-08-19T16:50:44.761,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/307. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.762,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",307,active,0} [ns_server:debug,2014-08-19T16:50:44.844,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 305. Nacking mccouch update. [views:debug,2014-08-19T16:50:44.844,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/305. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.845,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",305,active,0} [ns_server:debug,2014-08-19T16:50:44.846,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512, 457,329,146,874,691,563,508,380,1002,925,797,742,614,431,248,120,976,848,665, 537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507, 379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765, 637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662, 534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870, 687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895,712, 584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792, 737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970, 842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892, 709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016, 939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 
989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338, 883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180, 908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010, 805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310, 855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671, 360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721, 410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515, 460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565, 510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003,926,615, 304] [views:debug,2014-08-19T16:50:44.895,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/305. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.895,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",305,active,0} [ns_server:debug,2014-08-19T16:50:44.987,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 303. Nacking mccouch update. [views:debug,2014-08-19T16:50:44.987,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/303. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:44.987,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",303,active,0} [ns_server:debug,2014-08-19T16:50:44.989,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512, 457,329,146,874,691,563,508,380,1002,925,797,742,614,431,303,248,120,976,848, 665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562, 507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770, 715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795, 740,612,429,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845, 662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998, 870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350,895, 712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920, 792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945, 817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114, 970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164, 892,709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372, 917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369, 186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266, 1016,939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419, 236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466, 338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363, 180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932, 621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126, 982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176, 776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226, 
826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331, 876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003, 926,615,304] [views:debug,2014-08-19T16:50:45.038,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/303. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.038,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",303,active,0} [ns_server:debug,2014-08-19T16:50:45.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 301. Nacking mccouch update. [views:debug,2014-08-19T16:50:45.146,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/301. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",301,active,0} [ns_server:debug,2014-08-19T16:50:45.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,849,666,538,483,355,172,900,772,717,589,406,278,951,823,640,512, 457,329,146,874,691,563,508,380,1002,925,797,742,614,431,303,248,120,976,848, 665,537,482,354,899,771,716,588,405,222,950,822,767,639,456,328,873,690,562, 507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170,898,770, 715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795, 740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,973, 845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142, 998,870,687,559,504,376,921,793,738,610,427,244,116,972,844,661,533,478,350, 895,712,584,401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192, 920,792,737,609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022, 945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242, 114,970,842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347, 164,892,709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500, 372,917,789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447, 319,136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 369,186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394, 266,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602, 419,236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627, 
444,316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524, 469,341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677, 549,494,366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009, 932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487, 176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592, 226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642, 331,876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381, 1003,926,615,304] [views:debug,2014-08-19T16:50:45.196,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/301. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.197,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",301,active,0} [ns_server:debug,2014-08-19T16:50:45.288,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 299. Nacking mccouch update. [views:debug,2014-08-19T16:50:45.288,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/299. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.288,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",299,active,0} [ns_server:debug,2014-08-19T16:50:45.290,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,772,717,589,406,278,951,823,640,512,457,329,146,874, 691,563,508,380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354, 899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001, 924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276, 949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301, 246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454, 326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479, 351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559, 504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,242,114,970,842, 659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684, 556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709, 581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789, 734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130, 986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338,883, 700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908, 780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310,855, 544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671,360, 905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721,410, 955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515,460, 
694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565,510, 799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003,926,615,304, 849,538,483,172] [views:debug,2014-08-19T16:50:45.364,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/299. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.364,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",299,active,0} [ns_server:debug,2014-08-19T16:50:45.495,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 297. Nacking mccouch update. [views:debug,2014-08-19T16:50:45.495,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/297. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.495,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",297,active,0} [ns_server:debug,2014-08-19T16:50:45.497,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,772,717,589,406,278,951,823,640,512,457,329,146,874, 691,563,508,380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354, 899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001, 924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276, 949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301, 246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454, 326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479, 351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559, 504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892, 709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917, 789,734,606,423,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967, 839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136, 992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016, 939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 
989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466,338, 883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180, 908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010, 805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932,621,310, 855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671, 360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776,721, 410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226,826,515, 460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331,876,565, 510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003,926,615, 304,849,538,483,172] [views:debug,2014-08-19T16:50:45.554,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/297. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.554,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",297,active,0} [ns_server:debug,2014-08-19T16:50:45.654,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 295. Nacking mccouch update. [views:debug,2014-08-19T16:50:45.654,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/295. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.655,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",295,active,0} [ns_server:debug,2014-08-19T16:50:45.656,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,772,717,589,406,278,951,823,640,512,457,329,146,874, 691,563,508,380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354, 899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001, 924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276, 949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301, 246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454, 326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479, 351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559, 504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892, 709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917, 789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,915,787,732,604,421,238,110,966,838,655,527,472, 344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369, 186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266, 1016,939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419, 236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466, 338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363, 180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932, 621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126, 982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176, 776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226, 
826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331, 876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003, 926,615,304,849,538,483,172] [views:debug,2014-08-19T16:50:45.713,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/295. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.713,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",295,active,0} [ns_server:debug,2014-08-19T16:50:45.788,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 293. Nacking mccouch update. [views:debug,2014-08-19T16:50:45.788,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/293. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.789,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",293,active,0} [ns_server:debug,2014-08-19T16:50:45.790,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,772,717,589,406,278,951,823,640,512,457,329,146,874, 691,563,508,380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354, 899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001, 924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276, 949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301, 246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454, 326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479, 351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559, 504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892, 709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917, 789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 369,186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394, 266,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602, 419,236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627, 
444,316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524, 469,341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677, 549,494,366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574,391, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009, 932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487, 176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592, 226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642, 331,876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381, 1003,926,615,304,849,538,483,172] [views:debug,2014-08-19T16:50:45.822,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/293. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.823,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",293,active,0} [ns_server:debug,2014-08-19T16:50:45.922,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 291. Nacking mccouch update. [views:debug,2014-08-19T16:50:45.922,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/291. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.923,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",291,active,0} [ns_server:debug,2014-08-19T16:50:45.924,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,772,717,589,406,278,951,823,640,512,457,329,146,874, 691,563,508,380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354, 899,771,716,588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001, 924,796,741,613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276, 949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301, 246,118,974,846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454, 326,999,871,688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479, 351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559, 504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584, 401,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892, 709,581,398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917, 789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 967,839,656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527, 472,344,889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497, 369,186,914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394, 266,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602, 419,291,236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755, 627,444,316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652, 524,469,341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860, 677,549,494,366,911,783,728,600,417,234,962,834,651,523,468,340,885,702,574, 391,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649, 521,466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542, 487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903, 
592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953, 642,331,876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692, 381,1003,926,615,304,849,538,483,172] [views:debug,2014-08-19T16:50:45.981,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/291. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:45.982,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",291,active,0} [ns_server:debug,2014-08-19T16:50:46.082,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 289. Nacking mccouch update. [views:debug,2014-08-19T16:50:46.082,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/289. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.082,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",289,active,0} [ns_server:debug,2014-08-19T16:50:46.084,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,951,823,640,512,457,329,146,874,691,563,508, 380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766, 638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896, 768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501, 373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398, 270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 
989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,232,960,832,649,521,466, 338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363, 180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009,932, 621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126, 982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487,176, 776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,226, 826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331, 876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381,1003, 926,615,304,849,538,483,172,772,717,406] [rebalance:info,2014-08-19T16:50:46.129,ns_1@10.242.238.88:<0.12029.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 682) [rebalance:info,2014-08-19T16:50:46.130,ns_1@10.242.238.88:<0.9857.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:info,2014-08-19T16:50:46.134,ns_1@10.242.238.88:<0.9865.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_682_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:46.134,ns_1@10.242.238.88:<0.9857.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:46.136,ns_1@10.242.238.88:<0.9857.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 682 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.12032.1> [ns_server:info,2014-08-19T16:50:46.137,ns_1@10.242.238.88:<0.12032.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 682 to state replica [views:debug,2014-08-19T16:50:46.148,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/289. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.148,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",289,active,0} [ns_server:debug,2014-08-19T16:50:46.181,ns_1@10.242.238.88:<0.12032.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_682 [rebalance:info,2014-08-19T16:50:46.183,ns_1@10.242.238.88:<0.12032.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[682]}, {checkpoints,[{682,1}]}, {name,<<"rebalance_682">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[682]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"682"}]} [rebalance:debug,2014-08-19T16:50:46.184,ns_1@10.242.238.88:<0.12032.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.12033.1> [rebalance:info,2014-08-19T16:50:46.185,ns_1@10.242.238.88:<0.12032.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:46.187,ns_1@10.242.238.88:<0.12032.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:46.187,ns_1@10.242.238.88:<0.12032.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successful takeover [rebalance:info,2014-08-19T16:50:46.188,ns_1@10.242.238.88:<0.9857.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 682 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:46.190,ns_1@10.242.238.88:<0.9865.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:46.194,ns_1@10.242.238.88:<0.9865.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_682_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:46.195,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 682 state change [{'ns_1@10.242.238.91',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:46.195,ns_1@10.242.238.88:<0.12037.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 682 state change: {'ns_1@10.242.238.91',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:46.217,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:46.217,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:46.218,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:46.218,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{682, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.91']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:46.218,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:46.229,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 682 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:46.230,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:46.230,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 682) [ns_server:debug,2014-08-19T16:50:46.306,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 287. Nacking mccouch update. [views:debug,2014-08-19T16:50:46.306,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/287. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.306,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",287,active,0} [ns_server:debug,2014-08-19T16:50:46.308,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,951,823,640,512,457,329,146,874,691,563,508, 380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766, 638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896, 768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501, 373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398, 270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,230,830,519,464,698,387,1009, 932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437, 126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542,487, 176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592, 
226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642, 331,876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692,381, 1003,926,615,304,849,538,483,172,772,717,406] [views:debug,2014-08-19T16:50:46.341,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/287. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",287,active,0} [ns_server:debug,2014-08-19T16:50:46.416,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 285. Nacking mccouch update. [views:debug,2014-08-19T16:50:46.416,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/285. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.416,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",285,active,0} [ns_server:debug,2014-08-19T16:50:46.418,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,951,823,640,512,457,329,146,874,691,563,508, 380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766, 638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896, 768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501, 373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398, 270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 
989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,228,828,517,462,696,385,1007,930,619,308,853,542, 487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903, 592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953, 642,331,876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458,692, 381,1003,926,615,304,849,538,483,172,772,717,406] [views:debug,2014-08-19T16:50:46.450,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/285. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.450,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",285,active,0} [ns_server:debug,2014-08-19T16:50:46.526,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 283. Nacking mccouch update. [views:debug,2014-08-19T16:50:46.526,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/283. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.526,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",283,active,0} [ns_server:debug,2014-08-19T16:50:46.528,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,951,823,640,512,457,329,146,874,691,563,508, 380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766, 638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896, 768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501, 373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398, 270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 
903,592,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408, 953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,224,824,513,458, 692,381,1003,926,615,304,849,538,483,172,772,717,406] [views:debug,2014-08-19T16:50:46.559,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/283. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.560,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",283,active,0} [ns_server:debug,2014-08-19T16:50:46.635,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 281. Nacking mccouch update. [views:debug,2014-08-19T16:50:46.635,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/281. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.635,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",281,active,0} [ns_server:debug,2014-08-19T16:50:46.637,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,951,823,640,512,457,329,146,874,691,563,508, 380,1002,925,797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716, 588,405,222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741, 613,430,302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766, 638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974, 846,663,535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871, 688,560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896, 768,713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501, 373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398, 270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 
989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,224,824,513, 458,692,381,1003,926,615,304,849,538,483,172,772,717,406] [views:debug,2014-08-19T16:50:46.669,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/281. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.669,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",281,active,0} [ns_server:debug,2014-08-19T16:50:46.833,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 279. Nacking mccouch update. [views:debug,2014-08-19T16:50:46.833,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/279. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",279,active,0} [ns_server:debug,2014-08-19T16:50:46.835,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,874,691,563,508,380,1002,925, 797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,222, 950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302, 975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,327, 144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535, 480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560,505, 377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585, 402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610, 427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818,763, 635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660, 532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868, 685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893, 710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918, 790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943, 815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759,631,448,320, 993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603, 420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886,703, 575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783, 728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833, 650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130,986, 858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466,338,883, 700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180,908, 780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010,805, 750,439,128,984,673,362,907,596,285,230,830,519,464,698,387,1009,932,621,310, 855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982,671, 360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853,542,487,176,776, 721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,281,226, 
826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642,331, 876,565,510,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381, 1003,926,615,304,849,538,483,172,772,717,406,951,640,329] [views:debug,2014-08-19T16:50:46.917,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/279. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:46.917,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",279,active,0} [ns_server:debug,2014-08-19T16:50:47.092,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 277. Nacking mccouch update. [views:debug,2014-08-19T16:50:47.093,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/277. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.093,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",277,active,0} [ns_server:debug,2014-08-19T16:50:47.094,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,874,691,563,508,380,1002,925, 797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277, 222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430, 302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455, 327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663, 535,480,352,897,769,714,586,403,220,948,820,765,637,454,326,999,871,688,560, 505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713, 585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946,818, 763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843, 660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996, 868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348, 893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759,631,448, 320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473, 345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553, 498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578, 395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989,861, 
678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130, 986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466,338, 883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180, 908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010, 805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387,1009,932,621, 310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126,982, 671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853,542,487,176, 776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903,592,281, 226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408,953,642, 331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692, 381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329] [views:debug,2014-08-19T16:50:47.176,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/277. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.176,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",277,active,0} [ns_server:debug,2014-08-19T16:50:47.351,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 275. Nacking mccouch update. [views:debug,2014-08-19T16:50:47.352,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/275. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",275,active,0} [ns_server:debug,2014-08-19T16:50:47.354,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,874,691,563,508,380,1002,925, 797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277, 222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430, 302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455, 327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663, 535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688, 560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768, 713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,218,1023,946, 818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971, 843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140, 996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501,373, 190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423, 295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759,631, 448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528, 473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706, 578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811, 756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363, 180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387,1009, 932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437, 126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853,542, 487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903, 
592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408, 953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824,513, 458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329] [views:debug,2014-08-19T16:50:47.402,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/275. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.403,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",275,active,0} [ns_server:debug,2014-08-19T16:50:47.544,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 273. Nacking mccouch update. [views:debug,2014-08-19T16:50:47.544,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/273. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.544,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",273,active,0} [ns_server:debug,2014-08-19T16:50:47.546,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,874,691,563,508,380,1002,925, 797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277, 222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430, 302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455, 327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663, 535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688, 560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768, 713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,893,710,582,399,216,1021,944,816,761,633,450,322,995,867,684,556,501, 373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398, 270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814,759, 631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656, 528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889, 706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 
108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329] [views:debug,2014-08-19T16:50:47.595,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/273. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.595,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",273,active,0} [ns_server:debug,2014-08-19T16:50:47.685,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 271. Nacking mccouch update. [views:debug,2014-08-19T16:50:47.685,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/271. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.685,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",271,active,0} [ns_server:debug,2014-08-19T16:50:47.687,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,874,691,563,508,380,1002,925, 797,742,614,431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277, 222,950,822,767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430, 302,975,847,664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455, 327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663, 535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688, 560,505,377,194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768, 713,585,402,274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 971,843,660,532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556, 501,373,190,918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581, 398,270,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734, 606,423,295,240,112,968,840,657,529,474,346,891,708,580,397,214,1019,942,814, 759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839, 656,528,473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344, 889,706,578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186, 914,786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016, 939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291, 236,108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 
903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329] [rebalance:info,2014-08-19T16:50:47.710,ns_1@10.242.238.88:<0.11370.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 407 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:47.710,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 407 state to active [rebalance:info,2014-08-19T16:50:47.714,ns_1@10.242.238.88:<0.11370.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 407 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:47.715,ns_1@10.242.238.88:<0.11370.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:47.744,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/271. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.745,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",271,active,0} [rebalance:info,2014-08-19T16:50:47.787,ns_1@10.242.238.88:<0.11216.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 409 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:47.787,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 409 state to active [rebalance:info,2014-08-19T16:50:47.788,ns_1@10.242.238.88:<0.11216.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 409 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:47.788,ns_1@10.242.238.88:<0.11216.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:47.819,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 269. Nacking mccouch update. [views:debug,2014-08-19T16:50:47.820,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/269. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.820,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",269,active,0} [ns_server:debug,2014-08-19T16:50:47.822,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,797,742,614, 431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822, 767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847, 664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872, 689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402, 274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427, 299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763, 635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660, 532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868, 685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631, 448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528, 473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706, 578,395,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811, 756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491,363, 180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387,1009, 932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437, 126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853,542, 487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903, 
592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719,408, 953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824,513, 458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329,874,563, 508] [rebalance:info,2014-08-19T16:50:47.868,ns_1@10.242.238.88:<0.11058.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 411 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:47.868,ns_1@10.242.238.88:<0.11433.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 406 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:47.868,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 411 state to active [rebalance:info,2014-08-19T16:50:47.870,ns_1@10.242.238.88:<0.11058.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 411 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:47.870,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 406 state to active [rebalance:info,2014-08-19T16:50:47.871,ns_1@10.242.238.88:<0.11433.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 406 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:47.871,ns_1@10.242.238.88:<0.11058.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:47.871,ns_1@10.242.238.88:<0.11433.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:47.878,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/269. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:47.879,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",269,active,0} [ns_server:debug,2014-08-19T16:50:47.951,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_916_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_916_'ns_1@10.242.238.89'">>}]}, {move_state,406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_406_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_406_'ns_1@10.242.238.90'">>}]}, {move_state,662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_662_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_662_'ns_1@10.242.238.89'">>}]}, {move_state,917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_917_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_917_'ns_1@10.242.238.89'">>}]}, {move_state,407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_407_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_407_'ns_1@10.242.238.90'">>}]}, {move_state,663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_663_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_663_'ns_1@10.242.238.89'">>}]}, {move_state,918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_918_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_918_'ns_1@10.242.238.89'">>}]}, {move_state,408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_408_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_408_'ns_1@10.242.238.90'">>}]}, {move_state,664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_664_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_664_'ns_1@10.242.238.89'">>}]}, {move_state,919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_919_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_919_'ns_1@10.242.238.89'">>}]}, {move_state,409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_409_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_409_'ns_1@10.242.238.90'">>}]}, {move_state,665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_665_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_665_'ns_1@10.242.238.89'">>}]}, {move_state,920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_920_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_920_'ns_1@10.242.238.89'">>}]}, {move_state,410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_410_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_410_'ns_1@10.242.238.90'">>}]}, {move_state,666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_666_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_666_'ns_1@10.242.238.89'">>}]}, {move_state,921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_921_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_921_'ns_1@10.242.238.89'">>}]}, {move_state,411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_411_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_411_'ns_1@10.242.238.90'">>}]}, {move_state,667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_667_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_667_'ns_1@10.242.238.89'">>}]}, {move_state,922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_922_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_922_'ns_1@10.242.238.89'">>}]}, {move_state,412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_412_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_412_'ns_1@10.242.238.90'">>}]}, {move_state,668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_668_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_668_'ns_1@10.242.238.89'">>}]}, {move_state,923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_923_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_923_'ns_1@10.242.238.89'">>}]}, {move_state,413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_413_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_413_'ns_1@10.242.238.90'">>}]}, {move_state,669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_669_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_669_'ns_1@10.242.238.89'">>}]}, {move_state,924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_924_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_924_'ns_1@10.242.238.89'">>}]}, {move_state,414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_414_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_414_'ns_1@10.242.238.90'">>}]}, {move_state,670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_670_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_670_'ns_1@10.242.238.89'">>}]}, {move_state,925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_925_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_925_'ns_1@10.242.238.89'">>}]}, {move_state,415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_415_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_415_'ns_1@10.242.238.90'">>}]}, {move_state,671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_671_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_671_'ns_1@10.242.238.89'">>}]}, {move_state,926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_926_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_926_'ns_1@10.242.238.89'">>}]}, {move_state,416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_416_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_416_'ns_1@10.242.238.90'">>}]}, {move_state,672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_672_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_672_'ns_1@10.242.238.89'">>}]}, {move_state,927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_927_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_927_'ns_1@10.242.238.89'">>}]}, {move_state,417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_417_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_417_'ns_1@10.242.238.90'">>}]}, {move_state,673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_673_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_673_'ns_1@10.242.238.89'">>}]}, {move_state,928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_928_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_928_'ns_1@10.242.238.89'">>}]}, {move_state,418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_418_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_418_'ns_1@10.242.238.90'">>}]}, {move_state,674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_674_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_674_'ns_1@10.242.238.89'">>}]}, {move_state,929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_929_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_929_'ns_1@10.242.238.89'">>}]}, {move_state,419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_419_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_419_'ns_1@10.242.238.90'">>}]}, {move_state,675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_675_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_675_'ns_1@10.242.238.89'">>}]}, {move_state,930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_930_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_930_'ns_1@10.242.238.89'">>}]}, {move_state,420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_420_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_420_'ns_1@10.242.238.90'">>}]}, {move_state,676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_676_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_676_'ns_1@10.242.238.89'">>}]}, {move_state,931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_931_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_931_'ns_1@10.242.238.89'">>}]}, {move_state,421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_421_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_421_'ns_1@10.242.238.90'">>}]}, {move_state,677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_677_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_677_'ns_1@10.242.238.89'">>}]}, {move_state,932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_932_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_932_'ns_1@10.242.238.89'">>}]}, {move_state,422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_422_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_422_'ns_1@10.242.238.90'">>}]}, {move_state,678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_678_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_678_'ns_1@10.242.238.89'">>}]}, {move_state,933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_933_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_933_'ns_1@10.242.238.89'">>}]}, {move_state,423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_423_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_423_'ns_1@10.242.238.90'">>}]}, {move_state,679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_679_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_679_'ns_1@10.242.238.89'">>}]}, {move_state,934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_934_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_934_'ns_1@10.242.238.89'">>}]}, {move_state,424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_424_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_424_'ns_1@10.242.238.90'">>}]}, {move_state,680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_680_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_680_'ns_1@10.242.238.89'">>}]}, {move_state,935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_935_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_935_'ns_1@10.242.238.89'">>}]}, {move_state,425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_425_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_425_'ns_1@10.242.238.90'">>}]}, {move_state,681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_681_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_681_'ns_1@10.242.238.89'">>}]}, {move_state,936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_936_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_936_'ns_1@10.242.238.89'">>}]}, {move_state,426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_426_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_426_'ns_1@10.242.238.90'">>}]}, {move_state,937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_937_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_937_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:50:47.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 916, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 406, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 662, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 917, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 407, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 663, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 918, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 408, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:50:47.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 664, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 919, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 409, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 665, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 920, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 410, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 666, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 921, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 411, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 667, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 922, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 412, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 668, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 923, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 413, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 669, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 924, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 414, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 670, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:47.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 925, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 415, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 671, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 926, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 416, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 672, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 927, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 417, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 673, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 928, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 418, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 674, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 929, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 419, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 675, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 930, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 420, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 676, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 931, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:47.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 421, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 677, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 932, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 422, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [rebalance:info,2014-08-19T16:50:47.980,ns_1@10.242.238.88:<0.10890.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 413 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:47.980,ns_1@10.242.238.88:<0.11293.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 408 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:47.980,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 413 state to active [ns_server:debug,2014-08-19T16:50:47.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 678, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:50:47.981,ns_1@10.242.238.88:<0.10890.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 413 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:47.981,ns_1@10.242.238.88:<0.19214.0>:ns_memcached:do_handle_call:527]Changed vbucket 408 state to active [rebalance:info,2014-08-19T16:50:47.983,ns_1@10.242.238.88:<0.11293.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 408 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:47.983,ns_1@10.242.238.88:<0.10890.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:47.983,ns_1@10.242.238.88:<0.11293.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:47.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 933, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 423, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 679, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 934, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 424, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 680, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
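The entries above repeat one pattern per outgoing vbucket on ns_1@10.242.238.88: janitor_agent sets the vbucket's state tuple (logged as {Node,active,paused,undefined}), ns_memcached acknowledges the change, the single-vbucket mover fetches the replication persistence checkpoint id and then waits for checkpoint 1 on the future replicas, while ns_rebalance_observer keeps receiving update_stats casts of the form (vbucket, [{node, count}]) where the counts appear to be docs remaining per future replica (all zero here). The following is a minimal, self-contained Python simulation of that startup sequence, written only from what the log shows; every name in it is an illustrative assumption (ns_server itself is Erlang), and it reproduces only the order of steps, not the real interfaces.

# Minimal, self-contained simulation of the per-vbucket move startup sequence
# seen in the surrounding log entries. All names here are illustrative
# assumptions; the real logic lives in ns_server's Erlang modules
# (janitor_agent, ns_single_vbucket_mover, ns_rebalance_observer).

from dataclasses import dataclass, field


@dataclass
class RebalanceObserverSketch:
    # accumulates "update_stats" casts: vbucket -> {future replica: docs left}
    stats: dict = field(default_factory=dict)

    def update_stats(self, vbucket, per_node_docs_left):
        self.stats[vbucket] = dict(per_node_docs_left)
        print(f"Got update_stats: {vbucket}, {per_node_docs_left}")


def start_vbucket_move(vbucket, old_master, future_replicas, observer):
    # 1. janitor_agent:set_vbucket_state on the old master
    #    (logged as {Node, active, paused, undefined})
    print(f"Doing vbucket {vbucket} state change: "
          f"({old_master}, active, paused, undefined)")
    print(f"Changed vbucket {vbucket} state to active")

    # 2. ask the old master which persistence checkpoint the replica builders
    #    must reach before the move can proceed (checkpoint 1 in this log)
    checkpoint = 1
    print(f"Doing get_replication_persistence_checkpoint_id call "
          f"for vbucket {vbucket} on {old_master}")

    # 3. the single-vbucket mover then blocks on the replicas
    print(f"Will wait for checkpoint {checkpoint} on replicas")

    # 4. progress reporting: docs left per future replica (all 0 here,
    #    i.e. nothing left to backfill)
    observer.update_stats(vbucket, [(node, 0) for node in future_replicas])


if __name__ == "__main__":
    obs = RebalanceObserverSketch()
    start_vbucket_move(413, "ns_1@10.242.238.88",
                       ["ns_1@10.242.238.90", "ns_1@10.242.238.89"], obs)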
[ns_server:debug,2014-08-19T16:50:47.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 935, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 425, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 681, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 936, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:47.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 426, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:47.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 937, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:48.028,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 267. Nacking mccouch update. [views:debug,2014-08-19T16:50:48.029,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/267. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.029,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",267,active,0} [ns_server:debug,2014-08-19T16:50:48.031,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,797,742,614, 431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822, 767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847, 664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872, 689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402, 274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427, 299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763, 635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660, 532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868, 685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631, 
448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528, 473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706, 578,395,267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329,874, 563,508] [rebalance:info,2014-08-19T16:50:48.047,ns_1@10.242.238.88:<0.11139.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 410 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.047,ns_1@10.242.238.88:<0.10745.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 415 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.048,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 410 state to active [rebalance:info,2014-08-19T16:50:48.049,ns_1@10.242.238.88:<0.11139.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 410 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.049,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 415 state to active [rebalance:info,2014-08-19T16:50:48.050,ns_1@10.242.238.88:<0.10745.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 415 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.050,ns_1@10.242.238.88:<0.11139.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.051,ns_1@10.242.238.88:<0.10745.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:48.079,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/267. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.079,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",267,active,0} [rebalance:info,2014-08-19T16:50:48.114,ns_1@10.242.238.88:<0.10605.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 417 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.114,ns_1@10.242.238.88:<0.10967.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 412 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.114,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 417 state to active [rebalance:info,2014-08-19T16:50:48.116,ns_1@10.242.238.88:<0.10605.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 417 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.116,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 412 state to active [rebalance:info,2014-08-19T16:50:48.117,ns_1@10.242.238.88:<0.10967.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 412 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.117,ns_1@10.242.238.88:<0.10605.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.118,ns_1@10.242.238.88:<0.10967.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.181,ns_1@10.242.238.88:<0.10446.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 419 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.181,ns_1@10.242.238.88:<0.10827.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 414 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.182,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 419 state to active [rebalance:info,2014-08-19T16:50:48.183,ns_1@10.242.238.88:<0.10446.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 419 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.183,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 414 state to active [rebalance:info,2014-08-19T16:50:48.184,ns_1@10.242.238.88:<0.10827.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 414 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.184,ns_1@10.242.238.88:<0.10446.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.185,ns_1@10.242.238.88:<0.10827.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:48.196,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 265. Nacking mccouch update. [views:debug,2014-08-19T16:50:48.196,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/265. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.196,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",265,active,0} [ns_server:debug,2014-08-19T16:50:48.198,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,797,742,614, 431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822, 767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847, 664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872, 689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402, 274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427, 299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763, 635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660, 532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868, 685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631, 448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528, 473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706, 578,395,267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 
903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329,874, 563,508] [rebalance:info,2014-08-19T16:50:48.248,ns_1@10.242.238.88:<0.10668.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 416 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.249,ns_1@10.242.238.88:<0.10288.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 421 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.249,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 416 state to active [rebalance:info,2014-08-19T16:50:48.250,ns_1@10.242.238.88:<0.10668.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 416 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.250,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 421 state to active [rebalance:info,2014-08-19T16:50:48.251,ns_1@10.242.238.88:<0.10288.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 421 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.252,ns_1@10.242.238.88:<0.10668.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.252,ns_1@10.242.238.88:<0.10288.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:48.263,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/265. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.263,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",265,active,0} [ns_server:debug,2014-08-19T16:50:48.338,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 263. Nacking mccouch update. [views:debug,2014-08-19T16:50:48.338,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/263. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.338,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",263,active,0} [rebalance:info,2014-08-19T16:50:48.340,ns_1@10.242.238.88:<0.10528.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 418 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.340,ns_1@10.242.238.88:<0.10104.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 423 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.341,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 418 state to active [ns_server:debug,2014-08-19T16:50:48.341,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,797,742,614, 431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822, 767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847, 664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872, 689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402, 274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427, 299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763, 635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660, 532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868, 685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631, 448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528, 473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706, 578,395,267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752, 
624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699, 571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698, 387,1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669, 358,903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774, 719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329, 874,563,508] [rebalance:info,2014-08-19T16:50:48.343,ns_1@10.242.238.88:<0.10528.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 418 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.343,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 423 state to active [rebalance:info,2014-08-19T16:50:48.344,ns_1@10.242.238.88:<0.10104.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 423 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.345,ns_1@10.242.238.88:<0.10528.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.345,ns_1@10.242.238.88:<0.10104.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:48.373,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/263. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.373,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",263,active,0} [rebalance:info,2014-08-19T16:50:48.432,ns_1@10.242.238.88:<0.10365.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 420 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.432,ns_1@10.242.238.88:<0.9960.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 425 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.433,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 420 state to active [rebalance:info,2014-08-19T16:50:48.434,ns_1@10.242.238.88:<0.10365.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 420 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.434,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 425 state to active [rebalance:info,2014-08-19T16:50:48.435,ns_1@10.242.238.88:<0.9960.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 425 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.435,ns_1@10.242.238.88:<0.10365.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.436,ns_1@10.242.238.88:<0.9960.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:48.470,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 261. Nacking mccouch update. [views:debug,2014-08-19T16:50:48.471,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/261. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.471,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",261,active,0} [ns_server:debug,2014-08-19T16:50:48.473,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,204,804,749,438,983, 672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543,488, 777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282,827, 516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643,332, 877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693,382, 1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198,798, 743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,797,742,614, 431,303,248,120,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822, 767,639,456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847, 664,536,481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872, 689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402, 274,947,819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427, 299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763, 635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660, 532,477,349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868, 685,557,502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631, 448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528, 473,345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706, 578,395,267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914, 786,731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882, 699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464, 698,387,1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258, 803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619, 308,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 
669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174, 774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640, 329,874,563,508] [views:debug,2014-08-19T16:50:48.521,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/261. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.521,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",261,active,0} [rebalance:info,2014-08-19T16:50:48.524,ns_1@10.242.238.88:<0.10186.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 422 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.524,ns_1@10.242.238.88:<0.11335.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 663 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.525,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 422 state to active [rebalance:info,2014-08-19T16:50:48.526,ns_1@10.242.238.88:<0.10186.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 422 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.526,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 663 state to active [rebalance:info,2014-08-19T16:50:48.527,ns_1@10.242.238.88:<0.11335.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 663 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.528,ns_1@10.242.238.88:<0.10186.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.528,ns_1@10.242.238.88:<0.11335.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:48.613,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 259. Nacking mccouch update. [views:debug,2014-08-19T16:50:48.613,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/259. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.613,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",259,active,0} [ns_server:debug,2014-08-19T16:50:48.615,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,202,802,747,436,981,670,359,904,593,282, 827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954,643, 332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148,693, 382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509,198, 798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614,303, 248,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822,767,639,456, 328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481, 353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506, 378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769,714, 586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794, 739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819, 764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116, 972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,349, 166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502, 374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399, 271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735, 607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760, 632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968, 840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993, 865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345,162, 890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370, 915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267, 212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603, 420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 
903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329,874, 563,508,797,742,431,120] [rebalance:info,2014-08-19T16:50:48.652,ns_1@10.242.238.88:<0.11195.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 665 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.652,ns_1@10.242.238.88:<0.10041.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 424 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.652,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 665 state to active [rebalance:info,2014-08-19T16:50:48.653,ns_1@10.242.238.88:<0.11195.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 665 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.653,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 424 state to active [rebalance:info,2014-08-19T16:50:48.654,ns_1@10.242.238.88:<0.10041.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 424 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.654,ns_1@10.242.238.88:<0.11195.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.655,ns_1@10.242.238.88:<0.10041.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:48.689,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/259. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.689,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",259,active,0} [ns_server:debug,2014-08-19T16:50:48.789,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 257. Nacking mccouch update. [views:debug,2014-08-19T16:50:48.789,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/257. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.789,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",257,active,0} [ns_server:debug,2014-08-19T16:50:48.791,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822,767,639, 456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536, 481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561, 506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769, 714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947, 819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452, 324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477, 349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582, 399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790, 735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112, 968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320, 993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,853, 542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358, 
903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329,874, 563,508,797,742,431,120] [rebalance:info,2014-08-19T16:50:48.794,ns_1@10.242.238.88:<0.11023.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 667 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.794,ns_1@10.242.238.88:<0.12439.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 426) [ns_server:info,2014-08-19T16:50:48.794,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 667 state to active [rebalance:info,2014-08-19T16:50:48.795,ns_1@10.242.238.88:<0.9883.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:48.796,ns_1@10.242.238.88:<0.11023.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 667 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.796,ns_1@10.242.238.88:<0.11023.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:48.798,ns_1@10.242.238.88:<0.9891.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_426_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:48.798,ns_1@10.242.238.88:<0.9883.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:48.802,ns_1@10.242.238.88:<0.9883.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 426 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.12446.1> [ns_server:info,2014-08-19T16:50:48.803,ns_1@10.242.238.88:<0.12446.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 426 to state replica [ns_server:debug,2014-08-19T16:50:48.836,ns_1@10.242.238.88:<0.12446.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_426 [rebalance:info,2014-08-19T16:50:48.837,ns_1@10.242.238.88:<0.12446.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[426]}, {checkpoints,[{426,1}]}, {name,<<"rebalance_426">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[426]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"426"}]} [rebalance:debug,2014-08-19T16:50:48.838,ns_1@10.242.238.88:<0.12446.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.12447.1> [rebalance:info,2014-08-19T16:50:48.839,ns_1@10.242.238.88:<0.12446.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:48.840,ns_1@10.242.238.88:<0.12446.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:48.841,ns_1@10.242.238.88:<0.12446.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:48.842,ns_1@10.242.238.88:<0.9883.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 426 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:48.843,ns_1@10.242.238.88:<0.9891.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
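Within the block above, the move of vbucket 426 ("default", ns_1@10.242.238.88 -> ns_1@10.242.238.89) reaches its takeover phase: the mover waits for the destination's index to be updated, kills the replication_building TAP stream into ns_1@10.242.238.89, sets the destination vbucket to replica, opens a takeover TAP stream named rebalance_426 (which needs no backfill), and flips the vbucket to active on the new master; the entries that follow then re-point the remaining replica on ns_1@10.242.238.90 at the new master and schedule deletion of the old copy. Below is a compact, self-contained Python sketch of that order of operations, derived only from these log lines; the function names and arguments are illustrative assumptions, not the actual janitor_agent/ebucketmigrator_srv interfaces.

# Compact, self-contained sketch of the takeover steps the log shows for
# vbucket 426 ("default", ns_1@10.242.238.88 -> ns_1@10.242.238.89).
# Function names and arguments are assumptions made for readability; the real
# implementation is Erlang (ns_single_vbucket_mover, ebucketmigrator_srv,
# janitor_agent).

def takeover(bucket, vbucket, old_master, new_master, other_replicas, log=print):
    # 1. wait for the destination's view index to catch up, then stop the
    #    "replication_building" TAP stream that seeded it
    log(f"wait_index_updated for {new_master} (vbucket {vbucket})")
    log(f"Killed tap replication_building_{vbucket}_'{new_master}'")

    # 2. spawn the mover, put the destination vbucket into replica state and
    #    open a takeover TAP stream from the old master; no backfill is needed
    log(f'Spawned mover "{bucket}" {vbucket} {old_master} -> {new_master}')
    log(f"Setting {new_master} vbucket {vbucket} to state replica")
    tap_opts = {"vbuckets": [vbucket], "checkpoints": [(vbucket, 1)],
                "name": f"rebalance_{vbucket}", "takeover": True}
    log(f"Starting tap stream: {tap_opts}")
    log("TAP stream is not doing backfill")

    # 3. after the takeover the new master becomes active, remaining replicas
    #    are re-pointed at it, and the old copy is scheduled for deletion
    log(f"Doing vbucket {vbucket} state change: ({new_master}, active)")
    for node in other_replicas:
        log(f"Doing vbucket {vbucket} state change: "
            f"({node}, replica, replicate_from={new_master})")
    log(f"Moving vbucket {vbucket} done. Will delete it on: ['{old_master}']")


if __name__ == "__main__":
    takeover("default", 426, "ns_1@10.242.238.88", "ns_1@10.242.238.89",
             ["ns_1@10.242.238.90"])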
[ns_server:info,2014-08-19T16:50:48.848,ns_1@10.242.238.88:<0.9891.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_426_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:48.848,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 426 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:48.848,ns_1@10.242.238.88:<0.12451.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 426 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:48.854,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:48.855,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{426, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:48.855,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:48.855,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:48.856,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:48.866,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 426 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:48.867,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 426) [ns_server:debug,2014-08-19T16:50:48.868,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:50:48.873,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/257. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.873,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",257,active,0} [rebalance:info,2014-08-19T16:50:48.877,ns_1@10.242.238.88:<0.11412.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 662 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:48.877,ns_1@10.242.238.88:<0.10869.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 669 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:48.877,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 662 state to active [rebalance:info,2014-08-19T16:50:48.878,ns_1@10.242.238.88:<0.11412.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 662 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:48.879,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 669 state to active [rebalance:info,2014-08-19T16:50:48.880,ns_1@10.242.238.88:<0.10869.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 669 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:48.880,ns_1@10.242.238.88:<0.11412.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:48.880,ns_1@10.242.238.88:<0.10869.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:48.973,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 255. Nacking mccouch update. [views:debug,2014-08-19T16:50:48.973,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/255. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:48.973,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",255,active,0} [ns_server:debug,2014-08-19T16:50:48.975,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822,767,639, 456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536, 481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561, 506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769, 714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947, 819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452, 324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477, 349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582, 399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790, 735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112, 968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320, 993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669, 
358,903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174,774, 719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640,329, 874,563,508,797,742,431,120] [views:debug,2014-08-19T16:50:49.032,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/255. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.032,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",255,active,0} [rebalance:info,2014-08-19T16:50:49.086,ns_1@10.242.238.88:<0.10724.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 671 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.086,ns_1@10.242.238.88:<0.11272.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 664 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.087,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 671 state to active [rebalance:info,2014-08-19T16:50:49.088,ns_1@10.242.238.88:<0.10724.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 671 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.088,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 664 state to active [rebalance:info,2014-08-19T16:50:49.089,ns_1@10.242.238.88:<0.11272.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 664 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.089,ns_1@10.242.238.88:<0.10724.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.089,ns_1@10.242.238.88:<0.11272.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:49.155,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 253. Nacking mccouch update. [views:debug,2014-08-19T16:50:49.156,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/253. 
Updated state: active (0) [ns_server:info,2014-08-19T16:50:49.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:50:49.156,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",253,active,0} [ns_server:debug,2014-08-19T16:50:49.158,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822,767,639, 456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536, 481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561, 506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769, 714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947, 819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452, 324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477, 349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582, 399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790, 735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112, 968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320, 993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 
748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,851,540,485,174, 774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951,640, 329,874,563,508,797,742,431,120] [views:debug,2014-08-19T16:50:49.206,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/253. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",253,active,0} [rebalance:info,2014-08-19T16:50:49.228,ns_1@10.242.238.88:<0.11118.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 666 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.228,ns_1@10.242.238.88:<0.10570.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 673 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.228,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 666 state to active [rebalance:info,2014-08-19T16:50:49.230,ns_1@10.242.238.88:<0.11118.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 666 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.230,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 673 state to active [rebalance:info,2014-08-19T16:50:49.231,ns_1@10.242.238.88:<0.10570.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 673 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.231,ns_1@10.242.238.88:<0.11118.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.232,ns_1@10.242.238.88:<0.10570.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:49.281,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 251. Nacking mccouch update. [views:debug,2014-08-19T16:50:49.281,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/251. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.281,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",251,active,0} [ns_server:debug,2014-08-19T16:50:49.283,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,976,848,665,537,482,354,899,771,716,588,405,277,222,950,822,767,639, 456,328,873,690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536, 481,353,170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561, 506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769, 714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947, 819,764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452, 324,997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477, 349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582, 399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790, 735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112, 968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320, 993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 
669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485, 174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,692,381,1003,926,615,304,849,538,483,172,772,717,406,951, 640,329,874,563,508,797,742,431,120] [views:debug,2014-08-19T16:50:49.315,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/251. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.315,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",251,active,0} [rebalance:info,2014-08-19T16:50:49.387,ns_1@10.242.238.88:<0.10946.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 668 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.387,ns_1@10.242.238.88:<0.10425.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 675 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.387,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 668 state to active [ns_server:debug,2014-08-19T16:50:49.390,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 249. Nacking mccouch update. [views:debug,2014-08-19T16:50:49.390,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/249. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.391,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",249,active,0} [rebalance:info,2014-08-19T16:50:49.391,ns_1@10.242.238.88:<0.10946.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 668 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.391,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 675 state to active [ns_server:debug,2014-08-19T16:50:49.393,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,899,771,716,588,405,277,222,950,822,767,639,456,328,873, 690,562,507,379,196,1001,924,796,741,613,430,302,975,847,664,536,481,353,170, 898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000, 923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403, 275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611, 428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636, 453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844, 661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869, 686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,349,166,894, 711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919, 
791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216, 1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424, 296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345,162,890,707, 579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886,703,575, 392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783,728, 600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961,833, 650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130,986, 858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466,338,883, 700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491,363,180, 908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260,1010, 805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387,1009,932,621, 310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748,437,126, 982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,253,853,542, 487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669,358,903, 592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485,174,774,719, 408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329, 874,563,508,797,742,431,120,976,665,354] [rebalance:info,2014-08-19T16:50:49.395,ns_1@10.242.238.88:<0.10425.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 675 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.395,ns_1@10.242.238.88:<0.10946.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.396,ns_1@10.242.238.88:<0.10425.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:49.425,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/249. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.425,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",249,active,0} [rebalance:info,2014-08-19T16:50:49.546,ns_1@10.242.238.88:<0.10261.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 677 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.546,ns_1@10.242.238.88:<0.10800.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 670 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.546,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 677 state to active [rebalance:info,2014-08-19T16:50:49.547,ns_1@10.242.238.88:<0.10261.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 677 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.548,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 670 state to active [rebalance:info,2014-08-19T16:50:49.549,ns_1@10.242.238.88:<0.10800.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 670 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.549,ns_1@10.242.238.88:<0.10261.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.549,ns_1@10.242.238.88:<0.10800.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:49.601,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 247. Nacking mccouch update. [views:debug,2014-08-19T16:50:49.601,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/247. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.601,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",247,active,0} [ns_server:debug,2014-08-19T16:50:49.603,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,899,771,716,588,405,277,222,950,822,767,639,456,328,873, 690,562,507,379,196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378, 1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586, 403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764, 636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972, 844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997, 869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,349,166, 894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271, 216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607, 424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632, 449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840, 657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865, 682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628,445, 317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886,703, 575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783, 728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130, 986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466,338, 883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491,363, 180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387,1009, 932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,253, 853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669, 
358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485,174, 774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951, 640,329,874,563,508,797,742,431,120,976,665,354] [views:debug,2014-08-19T16:50:49.635,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/247. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.635,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",247,active,0} [rebalance:info,2014-08-19T16:50:49.688,ns_1@10.242.238.88:<0.10083.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 679 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.688,ns_1@10.242.238.88:<0.10647.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 672 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.688,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 679 state to active [rebalance:info,2014-08-19T16:50:49.689,ns_1@10.242.238.88:<0.10083.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 679 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.690,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 672 state to active [rebalance:info,2014-08-19T16:50:49.691,ns_1@10.242.238.88:<0.10647.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 672 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.691,ns_1@10.242.238.88:<0.10083.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.691,ns_1@10.242.238.88:<0.10647.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:49.710,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 245. Nacking mccouch update. [views:debug,2014-08-19T16:50:49.710,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/245. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.710,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",245,active,0} [ns_server:debug,2014-08-19T16:50:49.712,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,899,771,716,588,405,277,222,950,822,767,639,456,328,873, 690,562,507,379,196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378, 1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586, 403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819, 764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116, 972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,971,843,660,532,477,349, 166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502, 374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399, 271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735, 607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760, 632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968, 840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993, 865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345,162, 890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370, 915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267, 212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603, 420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 
669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485, 174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406, 951,640,329,874,563,508,797,742,431,120,976,665,354] [views:debug,2014-08-19T16:50:49.749,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/245. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.749,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",245,active,0} [rebalance:info,2014-08-19T16:50:49.792,ns_1@10.242.238.88:<0.10503.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 674 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.792,ns_1@10.242.238.88:<0.9939.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 681 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.792,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 674 state to active [rebalance:info,2014-08-19T16:50:49.793,ns_1@10.242.238.88:<0.10503.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 674 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.793,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 681 state to active [rebalance:info,2014-08-19T16:50:49.795,ns_1@10.242.238.88:<0.9939.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 681 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.795,ns_1@10.242.238.88:<0.10503.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.795,ns_1@10.242.238.88:<0.9939.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.884,ns_1@10.242.238.88:<0.10344.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 676 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.884,ns_1@10.242.238.88:<0.11391.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 917 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.884,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 676 state to active [rebalance:info,2014-08-19T16:50:49.885,ns_1@10.242.238.88:<0.10344.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 676 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.886,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 917 state to active [rebalance:info,2014-08-19T16:50:49.887,ns_1@10.242.238.88:<0.11391.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 917 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.887,ns_1@10.242.238.88:<0.10344.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.887,ns_1@10.242.238.88:<0.11391.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:49.924,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 243. Nacking mccouch update. [views:debug,2014-08-19T16:50:49.924,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/243. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.924,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",243,active,0} [ns_server:debug,2014-08-19T16:50:49.926,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,899,771,716,588,405,277,222,950,822,767,639,456,328,873, 690,562,507,379,196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378, 1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586, 403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819, 764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116, 972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477, 349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582, 399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790, 735,607,424,296,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112, 968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320, 993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 
491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485, 174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406, 951,640,329,874,563,508,797,742,431,120,976,665,354] [rebalance:info,2014-08-19T16:50:49.968,ns_1@10.242.238.88:<0.10160.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 678 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:49.968,ns_1@10.242.238.88:<0.11251.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 919 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:49.968,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 678 state to active [rebalance:info,2014-08-19T16:50:49.971,ns_1@10.242.238.88:<0.10160.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 678 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:49.971,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 919 state to active [rebalance:info,2014-08-19T16:50:49.972,ns_1@10.242.238.88:<0.11251.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 919 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:49.972,ns_1@10.242.238.88:<0.10160.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:49.973,ns_1@10.242.238.88:<0.11251.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:49.984,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/243. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:49.984,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",243,active,0} [rebalance:info,2014-08-19T16:50:50.035,ns_1@10.242.238.88:<0.11083.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 921 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.035,ns_1@10.242.238.88:<0.10006.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 680 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.035,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 921 state to active [rebalance:info,2014-08-19T16:50:50.036,ns_1@10.242.238.88:<0.11083.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 921 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.036,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 680 state to active [rebalance:info,2014-08-19T16:50:50.038,ns_1@10.242.238.88:<0.10006.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 680 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.038,ns_1@10.242.238.88:<0.11083.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.038,ns_1@10.242.238.88:<0.10006.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:50.059,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 241. Nacking mccouch update. [views:debug,2014-08-19T16:50:50.060,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/241. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.060,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",241,active,0} [ns_server:debug,2014-08-19T16:50:50.062,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,899,771,716,588,405,277,222,950,822,767,639,456,328,873, 690,562,507,379,196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353, 170,898,770,715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378, 1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586, 403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739, 611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819, 764,636,453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116, 972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477, 349,166,894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582, 399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943, 815,760,632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448, 320,993,865,682,554,499,371,188,916,788,733,605,422,294,967,839,656,528,473, 345,162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553, 498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578, 395,267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 731,603,420,292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811, 756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699, 571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698, 387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258, 803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619, 308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124, 
980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540, 485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901, 590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717, 406,951,640,329,874,563,508,797,742,431,120,976,665,354] [rebalance:info,2014-08-19T16:50:50.118,ns_1@10.242.238.88:<0.11468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 916 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.118,ns_1@10.242.238.88:<0.10925.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 923 state change: {'ns_1@10.242.238.88',active,paused,undefined} [views:debug,2014-08-19T16:50:50.118,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/241. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.119,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",241,active,0} [ns_server:info,2014-08-19T16:50:50.119,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 916 state to active [rebalance:info,2014-08-19T16:50:50.121,ns_1@10.242.238.88:<0.11468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 916 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.121,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 923 state to active [rebalance:info,2014-08-19T16:50:50.122,ns_1@10.242.238.88:<0.10925.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 923 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.123,ns_1@10.242.238.88:<0.11468.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.123,ns_1@10.242.238.88:<0.10925.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.202,ns_1@10.242.238.88:<0.10766.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 925 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.202,ns_1@10.242.238.88:<0.11314.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 918 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.203,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 925 state to active [rebalance:info,2014-08-19T16:50:50.204,ns_1@10.242.238.88:<0.10766.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 925 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.204,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 918 state to active [rebalance:info,2014-08-19T16:50:50.205,ns_1@10.242.238.88:<0.11314.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 918 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.205,ns_1@10.242.238.88:<0.10766.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.206,ns_1@10.242.238.88:<0.11314.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:50.243,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 239. Nacking mccouch update. [views:debug,2014-08-19T16:50:50.244,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/239. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.244,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",239,active,0} [ns_server:debug,2014-08-19T16:50:50.246,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,771,716,405,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021, 944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628,445, 317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886,703, 575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783, 728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,961, 833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313,130, 986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466,338, 883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491,363, 
180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388,260, 1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387,1009, 932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803,748, 437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308,253, 853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980,669, 358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485,174, 774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951, 640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222] [rebalance:info,2014-08-19T16:50:50.269,ns_1@10.242.238.88:<0.11162.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 920 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.269,ns_1@10.242.238.88:<0.10626.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 927 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.270,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 920 state to active [rebalance:info,2014-08-19T16:50:50.271,ns_1@10.242.238.88:<0.11162.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 920 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.271,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 927 state to active [rebalance:info,2014-08-19T16:50:50.272,ns_1@10.242.238.88:<0.10626.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 927 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.272,ns_1@10.242.238.88:<0.11162.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.273,ns_1@10.242.238.88:<0.10626.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:50.328,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/239. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",239,active,0} [rebalance:info,2014-08-19T16:50:50.397,ns_1@10.242.238.88:<0.11002.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 922 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.397,ns_1@10.242.238.88:<0.10468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 929 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.397,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 922 state to active [rebalance:info,2014-08-19T16:50:50.398,ns_1@10.242.238.88:<0.11002.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 922 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.399,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 929 state to active [rebalance:info,2014-08-19T16:50:50.400,ns_1@10.242.238.88:<0.10468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 929 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.400,ns_1@10.242.238.88:<0.11002.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.400,ns_1@10.242.238.88:<0.10468.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:50.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 237. Nacking mccouch update. [views:debug,2014-08-19T16:50:50.486,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/237. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.486,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",237,active,0} [ns_server:debug,2014-08-19T16:50:50.488,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,771,716,405,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021, 944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571,388, 260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 
669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485, 174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406, 951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222] [rebalance:info,2014-08-19T16:50:50.556,ns_1@10.242.238.88:<0.10309.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 931 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.556,ns_1@10.242.238.88:<0.10848.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 924 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.556,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 931 state to active [rebalance:info,2014-08-19T16:50:50.557,ns_1@10.242.238.88:<0.10309.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 931 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.557,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 924 state to active [rebalance:info,2014-08-19T16:50:50.558,ns_1@10.242.238.88:<0.10848.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 924 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.559,ns_1@10.242.238.88:<0.10309.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.559,ns_1@10.242.238.88:<0.10848.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:50.571,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/237. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.571,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",237,active,0} [ns_server:debug,2014-08-19T16:50:50.685,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 235. Nacking mccouch update. [views:debug,2014-08-19T16:50:50.685,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/235. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.685,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",235,active,0} [ns_server:debug,2014-08-19T16:50:50.687,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,771,716,405,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021, 944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 
669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485, 174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406, 951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222] [rebalance:info,2014-08-19T16:50:50.722,ns_1@10.242.238.88:<0.10139.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 933 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.722,ns_1@10.242.238.88:<0.10703.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 926 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.723,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 933 state to active [rebalance:info,2014-08-19T16:50:50.725,ns_1@10.242.238.88:<0.10139.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 933 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.725,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 926 state to active [rebalance:info,2014-08-19T16:50:50.726,ns_1@10.242.238.88:<0.10703.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 926 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.726,ns_1@10.242.238.88:<0.10139.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.727,ns_1@10.242.238.88:<0.10703.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:50.745,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/235. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.745,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",235,active,0} [rebalance:info,2014-08-19T16:50:50.832,ns_1@10.242.238.88:<0.9981.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 935 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:50:50.832,ns_1@10.242.238.88:<0.10549.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 928 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.832,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 935 state to active [rebalance:info,2014-08-19T16:50:50.833,ns_1@10.242.238.88:<0.9981.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 935 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:50:50.833,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 928 state to active [rebalance:info,2014-08-19T16:50:50.834,ns_1@10.242.238.88:<0.10549.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 928 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.835,ns_1@10.242.238.88:<0.9981.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:50:50.835,ns_1@10.242.238.88:<0.10549.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:50.844,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 233. Nacking mccouch update. [views:debug,2014-08-19T16:50:50.844,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/233. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.845,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",233,active,0} [ns_server:debug,2014-08-19T16:50:50.846,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,771,716,405,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021, 944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,959,831,648,520,465,337,154,882,699, 571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698, 387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258, 803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619, 308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124, 
980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540, 485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901, 590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717, 406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222] [views:debug,2014-08-19T16:50:50.904,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/233. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.904,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",233,active,0} [rebalance:info,2014-08-19T16:50:50.940,ns_1@10.242.238.88:<0.12771.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 937) [rebalance:info,2014-08-19T16:50:50.940,ns_1@10.242.238.88:<0.10390.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 930 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:50.940,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 930 state to active [rebalance:info,2014-08-19T16:50:50.941,ns_1@10.242.238.88:<0.9836.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:50.942,ns_1@10.242.238.88:<0.10390.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 930 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:50.942,ns_1@10.242.238.88:<0.10390.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:50:50.944,ns_1@10.242.238.88:<0.9844.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_937_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:50.944,ns_1@10.242.238.88:<0.9836.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:50:50.946,ns_1@10.242.238.88:<0.9836.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 937 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.12789.1> [ns_server:info,2014-08-19T16:50:50.947,ns_1@10.242.238.88:<0.12789.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 937 to state replica [ns_server:debug,2014-08-19T16:50:50.979,ns_1@10.242.238.88:<0.12789.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_937 [rebalance:info,2014-08-19T16:50:50.981,ns_1@10.242.238.88:<0.12789.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[937]}, {checkpoints,[{937,1}]}, {name,<<"rebalance_937">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[937]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"937"}]} [rebalance:debug,2014-08-19T16:50:50.982,ns_1@10.242.238.88:<0.12789.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.12793.1> [rebalance:info,2014-08-19T16:50:50.983,ns_1@10.242.238.88:<0.12789.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:50.984,ns_1@10.242.238.88:<0.12789.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
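The takeover entries just above show the option terms ebucketmigrator_srv logs when it starts a takeover TAP stream for vbucket 937 (a tap-stream spec plus a connection spec against port 11209). A minimal sketch, assuming only what appears in those log lines, of how such a pair of terms could be assembled; the module and function names are illustrative, not ns_server's API:

```erlang
%% Sketch: build the two option terms seen in the "Starting tap stream" entry.
%% Everything here mirrors the logged values; it is not the real implementation.
-module(takeover_opts_sketch).
-export([build/3]).

%% VBucket    : vbucket id, e.g. 937
%% Checkpoint : checkpoint id the replicas must reach, e.g. 1
%% {SrcHost, DstHost} : memcached endpoints, e.g. {"10.242.238.88", "10.242.238.91"}
build(VBucket, Checkpoint, {SrcHost, DstHost}) ->
    TapSpec = [{vbuckets, [VBucket]},
               {checkpoints, [{VBucket, Checkpoint}]},
               {name, list_to_binary("rebalance_" ++ integer_to_list(VBucket))},
               {takeover, true}],
    ConnSpec = {{SrcHost, 11209},
                {DstHost, 11209},
                [{username, "default"},
                 {password, get_from_config},
                 {vbuckets, [VBucket]},
                 {set_to_pending_state, true},
                 {takeover, true},
                 {suffix, integer_to_list(VBucket)}]},
    {TapSpec, ConnSpec}.
```

For example, build(937, 1, {"10.242.238.88", "10.242.238.91"}) reproduces the two terms logged for the vbucket 937 move.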
[rebalance:info,2014-08-19T16:50:50.985,ns_1@10.242.238.88:<0.12789.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:50.986,ns_1@10.242.238.88:<0.9836.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 937 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:50.987,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 231. Nacking mccouch update. [views:debug,2014-08-19T16:50:50.987,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/231. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:50.987,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",231,active,0} [rebalance:debug,2014-08-19T16:50:50.987,ns_1@10.242.238.88:<0.9844.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:50.992,ns_1@10.242.238.88:<0.9844.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_937_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:50.992,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 937 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:50.992,ns_1@10.242.238.88:<0.12797.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 937 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:50.992,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,829,518,463,152,697,386,1008,931,620,309,254,854,543, 488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904,593, 282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409,954, 643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459,148, 693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564,509, 198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925,614, 303,248,848,537,482,771,716,405,950,822,767,639,456,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021, 944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 
554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882, 699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464, 698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569, 258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930, 619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435, 124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851, 540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356, 901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772, 717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222] [ns_server:debug,2014-08-19T16:50:50.996,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:50.997,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:50.998,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{937, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:50.998,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.000,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:51.018,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 937 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.018,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 937) [ns_server:debug,2014-08-19T16:50:51.019,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.025,ns_1@10.242.238.88:<0.10216.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 932 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:51.025,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 932 state to active [rebalance:info,2014-08-19T16:50:51.026,ns_1@10.242.238.88:<0.10216.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 932 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:51.027,ns_1@10.242.238.88:<0.10216.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:50:51.046,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/231. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:51.046,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",231,active,0} [rebalance:info,2014-08-19T16:50:51.099,ns_1@10.242.238.88:<0.10062.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 934 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:50:51.099,ns_1@10.242.238.88:<0.19213.0>:ns_memcached:do_handle_call:527]Changed vbucket 934 state to active [rebalance:info,2014-08-19T16:50:51.100,ns_1@10.242.238.88:<0.10062.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 934 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:51.101,ns_1@10.242.238.88:<0.10062.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:51.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 229. Nacking mccouch update. [views:debug,2014-08-19T16:50:51.146,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/229. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:51.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",229,active,0} [ns_server:debug,2014-08-19T16:50:51.150,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720,409, 954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514,459, 148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875,564, 509,198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002,925, 614,303,248,848,537,482,771,716,405,950,639,328,873,690,562,507,379,196,1001, 924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715,587,404, 276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429, 301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948,820,765, 637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845, 662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142,998, 870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350, 895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375, 192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400, 272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608, 425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816, 761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969, 841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579, 396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940, 812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237, 965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783, 728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882,699,571, 388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698,387, 1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258,803, 748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619,308, 253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124,980, 
669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540,485, 174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717,406, 951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222,822,767, 456] [rebalance:info,2014-08-19T16:50:51.160,ns_1@10.242.238.88:<0.12834.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 936) [rebalance:info,2014-08-19T16:50:51.160,ns_1@10.242.238.88:<0.12835.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 407) [rebalance:info,2014-08-19T16:50:51.161,ns_1@10.242.238.88:<0.12836.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 409) [rebalance:info,2014-08-19T16:50:51.161,ns_1@10.242.238.88:<0.12837.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 411) [rebalance:info,2014-08-19T16:50:51.161,ns_1@10.242.238.88:<0.9904.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.161,ns_1@10.242.238.88:<0.12838.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 406) [rebalance:info,2014-08-19T16:50:51.161,ns_1@10.242.238.88:<0.12839.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 415) [rebalance:info,2014-08-19T16:50:51.161,ns_1@10.242.238.88:<0.12840.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 410) [rebalance:info,2014-08-19T16:50:51.161,ns_1@10.242.238.88:<0.12841.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 423) [rebalance:info,2014-08-19T16:50:51.162,ns_1@10.242.238.88:<0.12842.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 413) [rebalance:info,2014-08-19T16:50:51.162,ns_1@10.242.238.88:<0.12843.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 419) [rebalance:info,2014-08-19T16:50:51.162,ns_1@10.242.238.88:<0.12844.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 408) [rebalance:info,2014-08-19T16:50:51.162,ns_1@10.242.238.88:<0.12845.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 417) [rebalance:info,2014-08-19T16:50:51.162,ns_1@10.242.238.88:<0.12846.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 412) [rebalance:info,2014-08-19T16:50:51.163,ns_1@10.242.238.88:<0.12847.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 421) [rebalance:info,2014-08-19T16:50:51.163,ns_1@10.242.238.88:<0.12848.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 418) [rebalance:info,2014-08-19T16:50:51.163,ns_1@10.242.238.88:<0.12849.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 416) 
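The burst of wait_index_updated entries around here shows one short-lived process per (destination node, vbucket) pair, all issued in parallel. A hedged sketch of that fan-out/fan-in pattern, with the per-vbucket wait stubbed out by a sleep; it illustrates the concurrency shape only and is not ns_server code:

```erlang
%% Sketch: spawn one waiter per {Node, VBucket} pair and collect all replies,
%% mirroring the parallel wait_index_updated calls in the surrounding log.
-module(wait_fanout_sketch).
-export([wait_all/1]).

%% Pairs = [{DstNode, VBucket}], e.g. [{'ns_1@10.242.238.89', 407}, ...]
wait_all(Pairs) ->
    Parent = self(),
    Refs = [begin
                Ref = make_ref(),
                spawn_link(fun () ->
                                   timer:sleep(10),  %% stand-in for the real index wait
                                   Parent ! {index_updated, Ref, Node, VB}
                           end),
                Ref
            end || {Node, VB} <- Pairs],
    %% Block until every waiter has reported back, in spawn order.
    [receive {index_updated, Ref, Node, VB} -> {Node, VB} end || Ref <- Refs].
```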
[rebalance:info,2014-08-19T16:50:51.163,ns_1@10.242.238.88:<0.12850.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 663) [rebalance:info,2014-08-19T16:50:51.163,ns_1@10.242.238.88:<0.12851.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 420) [rebalance:info,2014-08-19T16:50:51.163,ns_1@10.242.238.88:<0.12852.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 425) [rebalance:info,2014-08-19T16:50:51.163,ns_1@10.242.238.88:<0.12853.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 414) [rebalance:info,2014-08-19T16:50:51.164,ns_1@10.242.238.88:<0.12854.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 665) [rebalance:info,2014-08-19T16:50:51.164,ns_1@10.242.238.88:<0.12855.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 422) [rebalance:info,2014-08-19T16:50:51.164,ns_1@10.242.238.88:<0.12856.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 667) [rebalance:info,2014-08-19T16:50:51.164,ns_1@10.242.238.88:<0.12857.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 669) [rebalance:info,2014-08-19T16:50:51.164,ns_1@10.242.238.88:<0.12859.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 424) [rebalance:info,2014-08-19T16:50:51.164,ns_1@10.242.238.88:<0.11335.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.164,ns_1@10.242.238.88:<0.12860.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 662) [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.11195.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.12861.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 671) [ns_server:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.9912.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_936_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.12863.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 675) [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.11023.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.12864.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 664) [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.9904.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.10869.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.12865.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 666) [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.12866.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 674) [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.12867.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 670) [rebalance:info,2014-08-19T16:50:51.165,ns_1@10.242.238.88:<0.12868.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 673) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12869.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 672) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12870.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 677) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12871.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 681) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12872.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 668) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.11412.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12874.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 917) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12873.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 679) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12875.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 676) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12876.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 678) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12877.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 921) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12878.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 919) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.10724.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12879.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 680) [rebalance:info,2014-08-19T16:50:51.166,ns_1@10.242.238.88:<0.12880.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 923) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12881.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 925) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.10425.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12882.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 916) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.11272.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.11391.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12883.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 918) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12884.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 920) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12885.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 927) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.11118.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12886.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 929) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.10503.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12887.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 933) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12888.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 935) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12889.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 922) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.10800.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.12890.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 926) [rebalance:info,2014-08-19T16:50:51.167,ns_1@10.242.238.88:<0.11083.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.10570.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.12891.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 934) [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.10647.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.12892.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 930) [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.12893.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 932) [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.12895.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 931) [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.12894.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 928) [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.10261.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.10925.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.12897.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 924) [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.11251.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.11216.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.11370.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.11058.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.10946.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.11314.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.168,ns_1@10.242.238.88:<0.10766.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.9939.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.11433.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10083.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10745.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.11139.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.11468.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10344.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10104.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10890.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10160.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10446.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.11162.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.10468.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.169,ns_1@10.242.238.88:<0.11293.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.10626.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.10006.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.10605.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.10139.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.9904.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 936 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.12896.1> [rebalance:info,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.9981.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.11343.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_663_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.170,ns_1@10.242.238.88:<0.10967.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.11002.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.11335.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10288.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.11203.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_665_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10703.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10062.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.11195.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.12896.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 936 to state replica [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10848.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10528.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10668.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10549.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.171,ns_1@10.242.238.88:<0.10365.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.10309.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.10827.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.9960.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.10186.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.10216.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.10041.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.10390.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.11031.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_667_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.172,ns_1@10.242.238.88:<0.11023.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.173,ns_1@10.242.238.88:<0.10877.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_669_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.173,ns_1@10.242.238.88:<0.10869.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.177,ns_1@10.242.238.88:<0.11420.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_662_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.177,ns_1@10.242.238.88:<0.11412.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.180,ns_1@10.242.238.88:<0.10732.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_671_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.181,ns_1@10.242.238.88:<0.10724.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.192,ns_1@10.242.238.88:<0.11280.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_664_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.192,ns_1@10.242.238.88:<0.11272.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.193,ns_1@10.242.238.88:<0.10433.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_675_'ns_1@10.242.238.90'">>] 
[rebalance:info,2014-08-19T16:50:51.193,ns_1@10.242.238.88:<0.10425.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.194,ns_1@10.242.238.88:<0.11399.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_917_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.194,ns_1@10.242.238.88:<0.11391.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [views:debug,2014-08-19T16:50:51.197,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/229. Updated state: active (0) [ns_server:info,2014-08-19T16:50:51.197,ns_1@10.242.238.88:<0.11126.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_666_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.197,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",229,active,0} [rebalance:info,2014-08-19T16:50:51.197,ns_1@10.242.238.88:<0.11118.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.199,ns_1@10.242.238.88:<0.10511.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_674_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.199,ns_1@10.242.238.88:<0.10503.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.199,ns_1@10.242.238.88:<0.10814.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_670_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.200,ns_1@10.242.238.88:<0.10800.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.200,ns_1@10.242.238.88:<0.10578.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_673_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.200,ns_1@10.242.238.88:<0.10570.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.200,ns_1@10.242.238.88:<0.10269.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_677_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.200,ns_1@10.242.238.88:<0.10261.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.200,ns_1@10.242.238.88:<0.11091.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_921_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.201,ns_1@10.242.238.88:<0.11083.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.201,ns_1@10.242.238.88:<0.10655.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_672_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.201,ns_1@10.242.238.88:<0.10647.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:50:51.201,ns_1@10.242.238.88:<0.10954.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_668_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.201,ns_1@10.242.238.88:<0.10946.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.201,ns_1@10.242.238.88:<0.9947.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_681_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.202,ns_1@10.242.238.88:<0.9939.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.202,ns_1@10.242.238.88:<0.10933.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_923_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.202,ns_1@10.242.238.88:<0.10925.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.202,ns_1@10.242.238.88:<0.11259.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_919_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.202,ns_1@10.242.238.88:<0.11251.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.202,ns_1@10.242.238.88:<0.10091.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_679_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.203,ns_1@10.242.238.88:<0.10083.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.203,ns_1@10.242.238.88:<0.10168.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_678_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.203,ns_1@10.242.238.88:<0.10160.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.203,ns_1@10.242.238.88:<0.11322.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_918_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.204,ns_1@10.242.238.88:<0.11314.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.204,ns_1@10.242.238.88:<0.10774.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_925_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.204,ns_1@10.242.238.88:<0.10766.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.204,ns_1@10.242.238.88:<0.10352.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_676_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.205,ns_1@10.242.238.88:<0.10344.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.205,ns_1@10.242.238.88:<0.11378.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_407_'ns_1@10.242.238.89'">>] 
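The kill_a_bunch_of_tap_names entries above and below all name TAP streams of the form replication_building_<vbucket>_'<destination node>'. A small illustrative reconstruction of that naming, matching the logged binaries; this mirrors the observed format only and is not the ns_server implementation:

```erlang
%% Sketch: rebuild a replication-builder tap name as it appears in the log,
%% e.g. replication_building_name(937, 'ns_1@10.242.238.91') ->
%%          <<"replication_building_937_'ns_1@10.242.238.91'">>.
-module(tap_name_sketch).
-export([replication_building_name/2]).

replication_building_name(VBucket, DstNode) ->
    iolist_to_binary(["replication_building_",
                      integer_to_list(VBucket), "_",
                      io_lib:format("~p", [DstNode])]).  %% ~p quotes the node atom
```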
[ns_server:debug,2014-08-19T16:50:51.205,ns_1@10.242.238.88:<0.12896.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_936 [rebalance:info,2014-08-19T16:50:51.205,ns_1@10.242.238.88:<0.11370.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.205,ns_1@10.242.238.88:<0.11224.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_409_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.205,ns_1@10.242.238.88:<0.11216.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.206,ns_1@10.242.238.88:<0.11476.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_916_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.206,ns_1@10.242.238.88:<0.11468.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.206,ns_1@10.242.238.88:<0.11176.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_920_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.206,ns_1@10.242.238.88:<0.11162.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.206,ns_1@10.242.238.88:<0.11441.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_406_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.206,ns_1@10.242.238.88:<0.12896.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[936]}, {checkpoints,[{936,1}]}, {name,<<"rebalance_936">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[936]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"936"}]} [rebalance:info,2014-08-19T16:50:51.206,ns_1@10.242.238.88:<0.11433.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.207,ns_1@10.242.238.88:<0.10014.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_680_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.207,ns_1@10.242.238.88:<0.10006.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:debug,2014-08-19T16:50:51.207,ns_1@10.242.238.88:<0.12896.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.12961.1> [ns_server:info,2014-08-19T16:50:51.212,ns_1@10.242.238.88:<0.11147.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_410_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.213,ns_1@10.242.238.88:<0.11139.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.213,ns_1@10.242.238.88:<0.10476.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_929_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.213,ns_1@10.242.238.88:<0.12896.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.213,ns_1@10.242.238.88:<0.10468.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
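The ebucketmigrator_srv entry above dumps the takeover mover's start arguments in two parts: a tap-stream option list ({vbuckets, ...}, {checkpoints, ...}, {name, ...}, {takeover, true}) followed by a {Source, Destination, ConnectOptions} triple. The escript below is only an illustrative reconstruction of those terms for the vbucket 936 move, using exactly the values printed in the log; the variable names are ours and the snippet is not taken from ns_server.

#!/usr/bin/env escript
%% Illustrative sketch: rebuild the tap-stream/takeover terms as the log
%% prints them for vbucket 936, then pretty-print them. Variable names
%% (TapOpts, Src, Dst, ConnectOpts) are ours, not ns_server's.
main(_) ->
    VBucket = 936,
    TapOpts = [{vbuckets, [VBucket]},
               {checkpoints, [{VBucket, 1}]},
               {name, <<"rebalance_936">>},
               {takeover, true}],
    Src = {"10.242.238.88", 11209},
    Dst = {"10.242.238.91", 11209},
    ConnectOpts = [{username, "default"},
                   {password, get_from_config},
                   {vbuckets, [VBucket]},
                   {set_to_pending_state, true},
                   {takeover, true},
                   {suffix, "936"}],
    io:format("tap opts: ~p~nconnection: ~p~n", [TapOpts, {Src, Dst, ConnectOpts}]).

Running it simply echoes the same structure seen in the "Starting tap stream" entry, which makes the {takeover,true} / {set_to_pending_state,true} pairing used for a takeover mover easy to spot.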
[ns_server:info,2014-08-19T16:50:51.213,ns_1@10.242.238.88:<0.10112.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_423_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.213,ns_1@10.242.238.88:<0.10104.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10898.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_413_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10454.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_419_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.11066.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_411_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10753.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_415_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10890.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.11301.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_408_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10446.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.11058.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10745.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10147.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_933_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.11293.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10139.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.214,ns_1@10.242.238.88:<0.10634.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_927_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.9989.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_935_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.10626.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.9981.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.10975.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_412_'ns_1@10.242.238.89'">>] 
[ns_server:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.10613.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_417_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.10967.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.10070.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_934_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.10605.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.215,ns_1@10.242.238.88:<0.10296.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_421_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.216,ns_1@10.242.238.88:<0.10062.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.216,ns_1@10.242.238.88:<0.10288.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.216,ns_1@10.242.238.88:<0.10711.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_926_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.216,ns_1@10.242.238.88:<0.10703.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.216,ns_1@10.242.238.88:<0.10676.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_416_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.216,ns_1@10.242.238.88:<0.10668.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.216,ns_1@10.242.238.88:<0.11010.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_922_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.217,ns_1@10.242.238.88:<0.11002.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.217,ns_1@10.242.238.88:<0.10536.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_418_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.217,ns_1@10.242.238.88:<0.10856.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_924_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.217,ns_1@10.242.238.88:<0.10528.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.217,ns_1@10.242.238.88:<0.10848.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.217,ns_1@10.242.238.88:<0.10557.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_928_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.217,ns_1@10.242.238.88:<0.10549.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[rebalance:debug,2014-08-19T16:50:51.219,ns_1@10.242.238.88:<0.12896.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.220,ns_1@10.242.238.88:<0.12896.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:51.223,ns_1@10.242.238.88:<0.10317.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_931_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.223,ns_1@10.242.238.88:<0.10309.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.223,ns_1@10.242.238.88:<0.9968.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_425_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.223,ns_1@10.242.238.88:<0.9960.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.223,ns_1@10.242.238.88:<0.10224.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_932_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.223,ns_1@10.242.238.88:<0.10216.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10835.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_414_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10049.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_424_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10398.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_930_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10041.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10827.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.9904.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 936 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10390.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10373.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_420_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10197.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_422_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.225,ns_1@10.242.238.88:<0.10186.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:50:51.224,ns_1@10.242.238.88:<0.10365.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:debug,2014-08-19T16:50:51.226,ns_1@10.242.238.88:<0.9912.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
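The janitor_agent:set_vbucket_state entries in this stretch (e.g. "Doing vbucket 936 state change: {'ns_1@10.242.238.91',active,undefined,undefined}") print a four-element tuple. Judging only from the values seen in this log, the fields appear to be the target node, the new vbucket state (active/replica/pending), a rebalance sub-state, and the node to replicate from, with undefined where a field does not apply; that reading is an assumption, not taken from the ns_server source. A minimal helper that renders such a tuple on one line, under that assumption:

%% Illustrative helper (not ns_server code): render the 4-tuple printed by
%% the "Doing vbucket N state change" log entries. Field meanings are
%% inferred from the values seen in this log and are an assumption.
-module(vb_state_log).
-export([describe/2]).

describe(VBucket, {Node, State, RebalanceState, ReplicateFrom}) ->
    io:format("vbucket ~p on ~p -> ~p (rebalance: ~p, replicate from: ~p)~n",
              [VBucket, Node, State, RebalanceState, ReplicateFrom]).

For example, vb_state_log:describe(936, {'ns_1@10.242.238.91',active,undefined,undefined}) corresponds to the takeover completion logged above, while the later replica entries in this section carry a source node in the last position.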
[ns_server:debug,2014-08-19T16:50:51.235,ns_1@10.242.238.88:<0.11335.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 663 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.12972.1> [ns_server:debug,2014-08-19T16:50:51.235,ns_1@10.242.238.88:<0.11195.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 665 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.12982.1> [ns_server:info,2014-08-19T16:50:51.236,ns_1@10.242.238.88:<0.12972.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 663 to state replica [ns_server:info,2014-08-19T16:50:51.237,ns_1@10.242.238.88:<0.12982.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 665 to state replica [ns_server:debug,2014-08-19T16:50:51.244,ns_1@10.242.238.88:<0.11023.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 667 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13010.1> [ns_server:debug,2014-08-19T16:50:51.244,ns_1@10.242.238.88:<0.10869.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 669 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13017.1> [ns_server:info,2014-08-19T16:50:51.245,ns_1@10.242.238.88:<0.13010.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 667 to state replica [ns_server:info,2014-08-19T16:50:51.245,ns_1@10.242.238.88:<0.13017.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 669 to state replica [ns_server:debug,2014-08-19T16:50:51.250,ns_1@10.242.238.88:<0.11412.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 662 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13023.1> [ns_server:info,2014-08-19T16:50:51.251,ns_1@10.242.238.88:<0.13023.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 662 to state replica [ns_server:debug,2014-08-19T16:50:51.260,ns_1@10.242.238.88:<0.10724.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 671 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13024.1> [ns_server:debug,2014-08-19T16:50:51.267,ns_1@10.242.238.88:<0.11272.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 664 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13025.1> [ns_server:info,2014-08-19T16:50:51.276,ns_1@10.242.238.88:<0.13024.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 671 to state replica [ns_server:info,2014-08-19T16:50:51.277,ns_1@10.242.238.88:<0.13025.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 664 to state replica [ns_server:debug,2014-08-19T16:50:51.278,ns_1@10.242.238.88:<0.10425.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 675 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13026.1> [ns_server:debug,2014-08-19T16:50:51.284,ns_1@10.242.238.88:<0.11391.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 917 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13027.1> [ns_server:debug,2014-08-19T16:50:51.285,ns_1@10.242.238.88:<0.12972.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_663 [ns_server:debug,2014-08-19T16:50:51.289,ns_1@10.242.238.88:<0.10503.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 674 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13028.1> [ns_server:info,2014-08-19T16:50:51.290,ns_1@10.242.238.88:<0.13026.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 675 
to state replica [ns_server:debug,2014-08-19T16:50:51.290,ns_1@10.242.238.88:<0.10800.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 670 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13029.1> [ns_server:info,2014-08-19T16:50:51.291,ns_1@10.242.238.88:<0.13027.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 917 to state replica [ns_server:debug,2014-08-19T16:50:51.292,ns_1@10.242.238.88:<0.10261.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 677 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13030.1> [ns_server:debug,2014-08-19T16:50:51.292,ns_1@10.242.238.88:<0.10570.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 673 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13031.1> [ns_server:debug,2014-08-19T16:50:51.293,ns_1@10.242.238.88:<0.11083.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 921 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13032.1> [ns_server:debug,2014-08-19T16:50:51.298,ns_1@10.242.238.88:<0.11118.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 666 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13033.1> [ns_server:debug,2014-08-19T16:50:51.300,ns_1@10.242.238.88:<0.10647.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 672 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13034.1> [ns_server:debug,2014-08-19T16:50:51.300,ns_1@10.242.238.88:<0.9939.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 681 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13035.1> [ns_server:debug,2014-08-19T16:50:51.300,ns_1@10.242.238.88:<0.10946.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 668 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13036.1> [ns_server:debug,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.11251.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 919 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13038.1> [ns_server:debug,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.10925.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 923 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13037.1> [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13029.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 670 to state replica [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13033.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 666 to state replica [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13035.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 681 to state replica [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13028.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 674 to state replica [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13034.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 672 to state replica [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13032.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 921 to state replica [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13031.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 673 to state replica [ns_server:info,2014-08-19T16:50:51.301,ns_1@10.242.238.88:<0.13036.1>:ebucketmigrator_srv:init:544]Setting 
{"10.242.238.90",11209} vbucket 668 to state replica [ns_server:debug,2014-08-19T16:50:51.302,ns_1@10.242.238.88:<0.13010.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_667 [ns_server:debug,2014-08-19T16:50:51.303,ns_1@10.242.238.88:<0.10083.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 679 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13039.1> [ns_server:debug,2014-08-19T16:50:51.303,ns_1@10.242.238.88:<0.11314.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 918 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13040.1> [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.88:<0.11216.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 409 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13041.1> [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.88:<0.10160.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 678 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13042.1> [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.88:<0.11468.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 916 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13044.1> [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.88:<0.10344.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 676 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13047.1> [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.88:<0.11433.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 406 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13048.1> [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.88:<0.11370.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 407 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13046.1> [ns_server:debug,2014-08-19T16:50:51.304,ns_1@10.242.238.88:<0.10006.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 680 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.13050.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10890.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 413 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13049.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10104.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 423 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13053.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10139.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 933 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13054.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10446.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 419 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13055.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10468.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 929 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13051.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10626.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 927 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13057.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.11293.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover 
"default" 408 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13056.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10967.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 412 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13058.1> [ns_server:debug,2014-08-19T16:50:51.305,ns_1@10.242.238.88:<0.10605.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 417 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13060.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.11058.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 411 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13059.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10062.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 934 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13066.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10703.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 926 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13069.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10288.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 421 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13068.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10745.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 415 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13070.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.9981.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 935 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13071.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10668.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 416 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13074.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10528.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 418 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13075.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10848.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 924 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13077.1> [ns_server:debug,2014-08-19T16:50:51.306,ns_1@10.242.238.88:<0.10309.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 931 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13079.1> [ns_server:debug,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.10827.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 414 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13081.1> [ns_server:debug,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.10186.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 422 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13087.1> [ns_server:debug,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.10390.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 930 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13089.1> [ns_server:info,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.13038.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 919 to state replica 
[ns_server:debug,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.12982.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_665 [ns_server:debug,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.11162.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 920 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13045.1> [ns_server:info,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.13030.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 677 to state replica [ns_server:debug,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.10041.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 424 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13082.1> [ns_server:debug,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.11002.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 922 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13076.1> [ns_server:info,2014-08-19T16:50:51.307,ns_1@10.242.238.88:<0.13037.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 923 to state replica [rebalance:info,2014-08-19T16:50:51.312,ns_1@10.242.238.88:<0.12972.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[663]}, {checkpoints,[{663,1}]}, {name,<<"rebalance_663">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[663]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"663"}]} [ns_server:debug,2014-08-19T16:50:51.314,ns_1@10.242.238.88:<0.13017.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_669 [ns_server:debug,2014-08-19T16:50:51.316,ns_1@10.242.238.88:<0.9960.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 425 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13080.1> [ns_server:debug,2014-08-19T16:50:51.316,ns_1@10.242.238.88:<0.11139.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 410 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13052.1> [ns_server:debug,2014-08-19T16:50:51.316,ns_1@10.242.238.88:<0.10365.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 420 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.13091.1> [ns_server:debug,2014-08-19T16:50:51.316,ns_1@10.242.238.88:<0.10766.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 925 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13043.1> [ns_server:debug,2014-08-19T16:50:51.321,ns_1@10.242.238.88:<0.10549.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 928 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13078.1> [ns_server:debug,2014-08-19T16:50:51.321,ns_1@10.242.238.88:<0.10216.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 932 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.13092.1> [ns_server:info,2014-08-19T16:50:51.326,ns_1@10.242.238.88:<0.13079.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 931 to state replica [ns_server:info,2014-08-19T16:50:51.326,ns_1@10.242.238.88:<0.13071.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 935 to state replica [ns_server:info,2014-08-19T16:50:51.326,ns_1@10.242.238.88:<0.13057.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 927 to state replica [ns_server:info,2014-08-19T16:50:51.326,ns_1@10.242.238.88:<0.13058.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 412 
to state replica [ns_server:info,2014-08-19T16:50:51.326,ns_1@10.242.238.88:<0.13048.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 406 to state replica [ns_server:info,2014-08-19T16:50:51.326,ns_1@10.242.238.88:<0.13040.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 918 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13039.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 679 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13041.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 409 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13089.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 930 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13066.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 934 to state replica [rebalance:debug,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.12972.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13094.1> [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13068.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 421 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13075.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 418 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13087.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 422 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13055.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 419 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13076.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 922 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13077.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 924 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13070.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 415 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13045.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 920 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13044.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 916 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13059.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 411 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13046.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 407 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13069.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 926 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13049.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 413 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13082.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 424 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13051.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 929 to state replica 
[ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13074.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 416 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13050.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 680 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13047.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 676 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13053.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 423 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13056.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 408 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13060.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 417 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13081.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 414 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13054.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 933 to state replica [ns_server:info,2014-08-19T16:50:51.327,ns_1@10.242.238.88:<0.13042.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 678 to state replica [ns_server:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13091.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 420 to state replica [ns_server:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13078.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 928 to state replica [rebalance:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13010.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[667]}, {checkpoints,[{667,1}]}, {name,<<"rebalance_667">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[667]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"667"}]} [ns_server:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13052.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 410 to state replica [ns_server:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13080.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 425 to state replica [ns_server:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13092.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 932 to state replica [ns_server:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13043.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 925 to state replica [ns_server:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.9912.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_936_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 936 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.12982.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[665]}, {checkpoints,[{665,1}]}, {name,<<"rebalance_665">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, 
{password,get_from_config}, {vbuckets,[665]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"665"}]} [rebalance:info,2014-08-19T16:50:51.331,ns_1@10.242.238.88:<0.13017.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[669]}, {checkpoints,[{669,1}]}, {name,<<"rebalance_669">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[669]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"669"}]} [rebalance:info,2014-08-19T16:50:51.332,ns_1@10.242.238.88:<0.13097.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 936 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:51.332,ns_1@10.242.238.88:<0.13010.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13098.1> [rebalance:debug,2014-08-19T16:50:51.332,ns_1@10.242.238.88:<0.12982.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13099.1> [rebalance:debug,2014-08-19T16:50:51.332,ns_1@10.242.238.88:<0.13017.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13100.1> [rebalance:info,2014-08-19T16:50:51.333,ns_1@10.242.238.88:<0.12972.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.333,ns_1@10.242.238.88:<0.13010.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.333,ns_1@10.242.238.88:<0.12982.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.333,ns_1@10.242.238.88:<0.13017.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.334,ns_1@10.242.238.88:<0.13023.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_662 [rebalance:debug,2014-08-19T16:50:51.335,ns_1@10.242.238.88:<0.12972.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.335,ns_1@10.242.238.88:<0.12972.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:50:51.336,ns_1@10.242.238.88:<0.13010.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.336,ns_1@10.242.238.88:<0.13010.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.336,ns_1@10.242.238.88:<0.11335.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 663 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.340,ns_1@10.242.238.88:<0.13017.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.340,ns_1@10.242.238.88:<0.13017.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.342,ns_1@10.242.238.88:<0.13025.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_664 [rebalance:debug,2014-08-19T16:50:51.342,ns_1@10.242.238.88:<0.12982.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:debug,2014-08-19T16:50:51.342,ns_1@10.242.238.88:<0.11343.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.342,ns_1@10.242.238.88:<0.12982.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.342,ns_1@10.242.238.88:<0.10869.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 669 state 
change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.342,ns_1@10.242.238.88:<0.11023.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 667 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.342,ns_1@10.242.238.88:<0.13023.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[662]}, {checkpoints,[{662,1}]}, {name,<<"rebalance_662">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[662]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"662"}]} [rebalance:debug,2014-08-19T16:50:51.343,ns_1@10.242.238.88:<0.13023.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13101.1> [rebalance:info,2014-08-19T16:50:51.343,ns_1@10.242.238.88:<0.11195.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 665 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.343,ns_1@10.242.238.88:<0.13025.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[664]}, {checkpoints,[{664,1}]}, {name,<<"rebalance_664">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[664]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"664"}]} [rebalance:debug,2014-08-19T16:50:51.344,ns_1@10.242.238.88:<0.13025.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13102.1> [rebalance:info,2014-08-19T16:50:51.344,ns_1@10.242.238.88:<0.13023.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.344,ns_1@10.242.238.88:<0.10877.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.347,ns_1@10.242.238.88:<0.13025.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.347,ns_1@10.242.238.88:<0.11203.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:51.347,ns_1@10.242.238.88:<0.11031.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:51.353,ns_1@10.242.238.88:<0.13023.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:50:51.353,ns_1@10.242.238.88:<0.11343.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_663_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.353,ns_1@10.242.238.88:<0.13023.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.353,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 227. Nacking mccouch update. [ns_server:debug,2014-08-19T16:50:51.354,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:51.354,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/227. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:51.354,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",227,active,0} [ns_server:debug,2014-08-19T16:50:51.354,ns_1@10.242.238.88:<0.13024.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_671 [ns_server:info,2014-08-19T16:50:51.354,ns_1@10.242.238.88:<0.10877.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_669_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.355,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{936, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.356,ns_1@10.242.238.88:<0.13025.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.356,ns_1@10.242.238.88:<0.11412.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 662 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:51.356,ns_1@10.242.238.88:<0.11203.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_665_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.356,ns_1@10.242.238.88:<0.13025.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.357,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
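The config-change entry above shows the bucket settings as a nested proplist: buckets -> [{configs, [{"default", Props}]}], where Props carries the fields visible in the log (the per-vbucket map entry, num_replicas, ram_quota, servers, and so on). The fragment below is only a sketch of walking such a structure with the standard proplists module to pull out a few of those fields; it is not part of ns_server.

%% Illustrative sketch: summarise a bucket-config proplist of the shape
%% printed in the "config change: buckets ->" entries above.
-module(bucket_cfg).
-export([summary/1]).

summary([{configs, Configs}]) ->
    [{Name,
      [{num_replicas, proplists:get_value(num_replicas, Props)},
       {ram_quota,    proplists:get_value(ram_quota, Props)},
       {servers,      proplists:get_value(servers, Props)}]}
     || {Name, Props} <- Configs].

Fed the "default" entry from the log, this returns [{"default",[{num_replicas,1},{ram_quota,13369344000},{servers,[...]}]}], i.e. the same values printed in the config-change dump.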
[rebalance:info,2014-08-19T16:50:51.357,ns_1@10.242.238.88:<0.13024.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[671]}, {checkpoints,[{671,1}]}, {name,<<"rebalance_671">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[671]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"671"}]} [ns_server:info,2014-08-19T16:50:51.357,ns_1@10.242.238.88:<0.11031.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_667_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:50:51.358,ns_1@10.242.238.88:<0.13024.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13114.1> [rebalance:debug,2014-08-19T16:50:51.360,ns_1@10.242.238.88:<0.11420.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.360,ns_1@10.242.238.88:<0.11272.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 664 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.361,ns_1@10.242.238.88:<0.13024.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.361,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,825,514, 459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330,875, 564,509,198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380,1002, 925,614,303,248,848,537,482,771,716,405,950,639,328,873,690,562,507,379,196, 1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715,587, 404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973, 845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325,142, 998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533,478, 350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558,503, 375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583, 400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736, 608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944, 816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241, 969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449,321, 138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529, 474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554, 499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707, 579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628,445, 
317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882,699, 571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698, 387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258, 803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930,619, 308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435,124, 980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851,540, 485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356,901, 590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772,717, 406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222,822, 767,456] [rebalance:debug,2014-08-19T16:50:51.362,ns_1@10.242.238.88:<0.13024.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:debug,2014-08-19T16:50:51.362,ns_1@10.242.238.88:<0.11280.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.362,ns_1@10.242.238.88:<0.13024.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.363,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.363,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:51.363,ns_1@10.242.238.88:<0.10724.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 671 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:51.363,ns_1@10.242.238.88:<0.11420.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_662_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:50:51.365,ns_1@10.242.238.88:<0.10732.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.365,ns_1@10.242.238.88:<0.11280.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_664_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.366,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 936 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.366,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 936) [ns_server:debug,2014-08-19T16:50:51.367,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.367,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 663 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.367,ns_1@10.242.238.88:<0.13126.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 663 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:51.368,ns_1@10.242.238.88:<0.10732.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_671_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.370,ns_1@10.242.238.88:<0.13026.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_675 [ns_server:debug,2014-08-19T16:50:51.371,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.371,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.372,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:51.372,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{663, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.373,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:51.375,ns_1@10.242.238.88:<0.13026.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[675]}, {checkpoints,[{675,1}]}, {name,<<"rebalance_675">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[675]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"675"}]} [rebalance:debug,2014-08-19T16:50:51.376,ns_1@10.242.238.88:<0.13026.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13144.1> [rebalance:info,2014-08-19T16:50:51.378,ns_1@10.242.238.88:<0.13026.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.379,ns_1@10.242.238.88:<0.13026.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.380,ns_1@10.242.238.88:<0.13026.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.380,ns_1@10.242.238.88:<0.10425.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 675 state change: 
{'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.382,ns_1@10.242.238.88:<0.10433.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.383,ns_1@10.242.238.88:<0.13028.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_674 [ns_server:info,2014-08-19T16:50:51.387,ns_1@10.242.238.88:<0.10433.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_675_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.387,ns_1@10.242.238.88:<0.13028.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[674]}, {checkpoints,[{674,1}]}, {name,<<"rebalance_674">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[674]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"674"}]} [rebalance:debug,2014-08-19T16:50:51.388,ns_1@10.242.238.88:<0.13028.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13147.1> [rebalance:info,2014-08-19T16:50:51.389,ns_1@10.242.238.88:<0.13028.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.391,ns_1@10.242.238.88:<0.13028.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.391,ns_1@10.242.238.88:<0.13028.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.392,ns_1@10.242.238.88:<0.10503.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 674 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.394,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 663 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.394,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 663) [ns_server:debug,2014-08-19T16:50:51.395,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.395,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 669 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.395,ns_1@10.242.238.88:<0.13151.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 669 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:51.397,ns_1@10.242.238.88:<0.13047.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_676 [rebalance:info,2014-08-19T16:50:51.398,ns_1@10.242.238.88:<0.13047.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[676]}, {checkpoints,[{676,1}]}, {name,<<"rebalance_676">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[676]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"676"}]} [rebalance:debug,2014-08-19T16:50:51.399,ns_1@10.242.238.88:<0.13047.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13152.1> [rebalance:info,2014-08-19T16:50:51.400,ns_1@10.242.238.88:<0.13047.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.401,ns_1@10.242.238.88:<0.13047.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.402,ns_1@10.242.238.88:<0.13047.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.403,ns_1@10.242.238.88:<0.10344.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 676 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.405,ns_1@10.242.238.88:<0.10511.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:51.406,ns_1@10.242.238.88:<0.10352.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.409,ns_1@10.242.238.88:<0.10511.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_674_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.410,ns_1@10.242.238.88:<0.10352.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_676_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.411,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.412,ns_1@10.242.238.88:<0.13036.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_668 [ns_server:debug,2014-08-19T16:50:51.412,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.412,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
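Editor's note: this part of the log repeats one cycle per vbucket: ebucketmigrator_srv starts a takeover tap stream, janitor_agent flips the vbucket active on the destination, ns_vbucket_mover logs "Moving vbucket N done.", and ns_config_log records the updated bucket map. A quick way to see how far the rebalance has progressed is to pull the move-done records out of the file. The following is a minimal throwaway sketch (Python, standard library only), not part of ns_server; it assumes only the record format visible above, and the path "debug.log" is a placeholder.

# Illustrative sketch: count the vbucket moves this node has logged as completed.
import re
import sys

# Matches records like:
# [rebalance:info,2014-08-19T16:50:51.366,ns_1@10.242.238.88:<0.25746.0>:
#   ns_vbucket_mover:on_move_done:300]Moving vbucket 936 done.
MOVE_DONE = re.compile(
    r"\[rebalance:info,([0-9T.:\-]+),[^\]]*ns_vbucket_mover:on_move_done[^\]]*\]"
    r"\s*Moving vbucket (\d+) done"
)

def completed_moves(text):
    """Return (timestamp, vbucket) pairs in log order."""
    return [(ts, int(vb)) for ts, vb in MOVE_DONE.findall(text)]

if __name__ == "__main__":
    path = sys.argv[1] if len(sys.argv) > 1 else "debug.log"  # placeholder path
    with open(path) as f:
        moves = completed_moves(f.read())
    print("%d vbucket moves completed" % len(moves))
    for ts, vb in moves:
        print(ts, "vbucket", vb)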
[ns_server:debug,2014-08-19T16:50:51.412,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.413,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{669, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:51.414,ns_1@10.242.238.88:<0.13036.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[668]}, {checkpoints,[{668,1}]}, {name,<<"rebalance_668">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[668]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"668"}]} [rebalance:debug,2014-08-19T16:50:51.414,ns_1@10.242.238.88:<0.13036.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13159.1> [rebalance:info,2014-08-19T16:50:51.415,ns_1@10.242.238.88:<0.13036.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.418,ns_1@10.242.238.88:<0.13036.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.418,ns_1@10.242.238.88:<0.13036.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.419,ns_1@10.242.238.88:<0.10946.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 668 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.421,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 669 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.421,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 669) [rebalance:debug,2014-08-19T16:50:51.422,ns_1@10.242.238.88:<0.10954.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.422,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.422,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 665 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.422,ns_1@10.242.238.88:<0.13168.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 665 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:51.426,ns_1@10.242.238.88:<0.10954.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_668_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.426,ns_1@10.242.238.88:<0.13069.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_926 [rebalance:info,2014-08-19T16:50:51.428,ns_1@10.242.238.88:<0.13069.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[926]}, {checkpoints,[{926,1}]}, {name,<<"rebalance_926">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[926]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"926"}]} [views:debug,2014-08-19T16:50:51.429,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/227. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:51.429,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",227,active,0} [rebalance:debug,2014-08-19T16:50:51.429,ns_1@10.242.238.88:<0.13069.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13171.1> [rebalance:info,2014-08-19T16:50:51.430,ns_1@10.242.238.88:<0.13069.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.431,ns_1@10.242.238.88:<0.13069.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.432,ns_1@10.242.238.88:<0.13069.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.433,ns_1@10.242.238.88:<0.10703.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 926 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.434,ns_1@10.242.238.88:<0.10711.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.438,ns_1@10.242.238.88:<0.10711.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_926_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.439,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.440,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:51.440,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.440,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.440,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{665, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.442,ns_1@10.242.238.88:<0.13054.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_933 [rebalance:info,2014-08-19T16:50:51.443,ns_1@10.242.238.88:<0.13054.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[933]}, {checkpoints,[{933,1}]}, {name,<<"rebalance_933">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[933]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"933"}]} [rebalance:debug,2014-08-19T16:50:51.445,ns_1@10.242.238.88:<0.13054.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13183.1> [rebalance:info,2014-08-19T16:50:51.445,ns_1@10.242.238.88:<0.13054.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.447,ns_1@10.242.238.88:<0.13054.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.447,ns_1@10.242.238.88:<0.13054.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.447,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 665 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.448,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 665) [rebalance:info,2014-08-19T16:50:51.448,ns_1@10.242.238.88:<0.10139.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 933 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.449,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.449,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 667 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.449,ns_1@10.242.238.88:<0.13186.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 667 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:51.450,ns_1@10.242.238.88:<0.10147.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.454,ns_1@10.242.238.88:<0.13052.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_410 [ns_server:info,2014-08-19T16:50:51.454,ns_1@10.242.238.88:<0.10147.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_933_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.455,ns_1@10.242.238.88:<0.13052.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[410]}, {checkpoints,[{410,1}]}, {name,<<"rebalance_410">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[410]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"410"}]} [rebalance:debug,2014-08-19T16:50:51.456,ns_1@10.242.238.88:<0.13052.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13189.1> [rebalance:info,2014-08-19T16:50:51.458,ns_1@10.242.238.88:<0.13052.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.459,ns_1@10.242.238.88:<0.13052.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.459,ns_1@10.242.238.88:<0.13052.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.461,ns_1@10.242.238.88:<0.11139.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 410 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.466,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.466,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
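Editor's note: each "Starting tap stream" record names the single vbucket being taken over and, in the tuple printed after it, the source and destination memcached endpoints (here 10.242.238.88:11209 shipping data to .89, .90 or .91). Tallying those destinations gives a rough picture of where this node's active vbuckets are going. Again a hedged, standalone Python sketch rather than anything Couchbase ships; it assumes only the record layout shown in this slice.

# Illustrative sketch: tally takeover tap streams by destination host, based on
# records like:
#   Starting tap stream: [{vbuckets,[675]}, ...]
#   {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [...]}
import re
from collections import Counter

TAP_START = re.compile(
    r"Starting tap stream:\s*\[\{vbuckets,\[(\d+)\]\}"       # vbucket id
    r'.*?\{\{"([\d.]+)",\d+\},\s*\{"([\d.]+)",\d+\}',        # source, destination hosts
    re.S,
)

def takeovers(text):
    """Yield (vbucket, source_host, destination_host) per takeover tap stream."""
    for vb, src, dst in TAP_START.findall(text):
        yield int(vb), src, dst

def destination_counts(text):
    """How many takeovers target each destination host."""
    return Counter(dst for _vb, _src, dst in takeovers(text))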
[rebalance:debug,2014-08-19T16:50:51.467,ns_1@10.242.238.88:<0.11147.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.467,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.467,ns_1@10.242.238.88:<0.13081.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_414 [ns_server:debug,2014-08-19T16:50:51.468,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.467,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{667, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:51.470,ns_1@10.242.238.88:<0.13081.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[414]}, {checkpoints,[{414,1}]}, {name,<<"rebalance_414">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[414]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"414"}]} [rebalance:debug,2014-08-19T16:50:51.471,ns_1@10.242.238.88:<0.13081.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13199.1> [ns_server:info,2014-08-19T16:50:51.471,ns_1@10.242.238.88:<0.11147.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_410_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.472,ns_1@10.242.238.88:<0.13081.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.473,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 667 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:51.474,ns_1@10.242.238.88:<0.13081.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.475,ns_1@10.242.238.88:<0.13081.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.475,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 667) [ns_server:debug,2014-08-19T16:50:51.475,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.476,ns_1@10.242.238.88:<0.10827.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 414 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.480,ns_1@10.242.238.88:<0.13046.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_407 [rebalance:info,2014-08-19T16:50:51.489,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 662 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.490,ns_1@10.242.238.88:<0.13203.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 662 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:50:51.491,ns_1@10.242.238.88:<0.13046.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[407]}, {checkpoints,[{407,1}]}, {name,<<"rebalance_407">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[407]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"407"}]} [rebalance:debug,2014-08-19T16:50:51.491,ns_1@10.242.238.88:<0.10835.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:51.491,ns_1@10.242.238.88:<0.13046.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13211.1> [rebalance:info,2014-08-19T16:50:51.492,ns_1@10.242.238.88:<0.13046.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.494,ns_1@10.242.238.88:<0.13046.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.494,ns_1@10.242.238.88:<0.13046.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:51.495,ns_1@10.242.238.88:<0.10835.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_414_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.495,ns_1@10.242.238.88:<0.11370.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 407 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.495,ns_1@10.242.238.88:<0.13070.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_415 [rebalance:info,2014-08-19T16:50:51.496,ns_1@10.242.238.88:<0.13070.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[415]}, {checkpoints,[{415,1}]}, {name,<<"rebalance_415">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[415]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"415"}]} [rebalance:debug,2014-08-19T16:50:51.497,ns_1@10.242.238.88:<0.13070.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13222.1> 
[rebalance:info,2014-08-19T16:50:51.497,ns_1@10.242.238.88:<0.13070.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.499,ns_1@10.242.238.88:<0.13070.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.499,ns_1@10.242.238.88:<0.13070.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.500,ns_1@10.242.238.88:<0.10745.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 415 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.507,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.507,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:debug,2014-08-19T16:50:51.508,ns_1@10.242.238.88:<0.11378.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.508,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.508,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{662, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.508,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.508,ns_1@10.242.238.88:<0.10753.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.511,ns_1@10.242.238.88:<0.11378.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_407_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:50:51.511,ns_1@10.242.238.88:<0.10753.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_415_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.512,ns_1@10.242.238.88:<0.13089.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_930 [rebalance:info,2014-08-19T16:50:51.514,ns_1@10.242.238.88:<0.13089.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[930]}, {checkpoints,[{930,1}]}, {name,<<"rebalance_930">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[930]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"930"}]} [rebalance:debug,2014-08-19T16:50:51.516,ns_1@10.242.238.88:<0.13089.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13234.1> [rebalance:info,2014-08-19T16:50:51.517,ns_1@10.242.238.88:<0.13089.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:50:51.518,ns_1@10.242.238.88:<0.13089.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.518,ns_1@10.242.238.88:<0.13089.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.519,ns_1@10.242.238.88:<0.10390.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 930 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.520,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 662 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.520,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 662) [rebalance:debug,2014-08-19T16:50:51.521,ns_1@10.242.238.88:<0.10398.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.521,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.521,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 664 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.521,ns_1@10.242.238.88:<0.13237.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 664 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:50:51.524,ns_1@10.242.238.88:<0.10398.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_930_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.528,ns_1@10.242.238.88:<0.13059.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_411 [rebalance:info,2014-08-19T16:50:51.530,ns_1@10.242.238.88:<0.13059.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[411]}, {checkpoints,[{411,1}]}, {name,<<"rebalance_411">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[411]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"411"}]} [rebalance:debug,2014-08-19T16:50:51.532,ns_1@10.242.238.88:<0.13059.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13241.1> [rebalance:info,2014-08-19T16:50:51.533,ns_1@10.242.238.88:<0.13059.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.534,ns_1@10.242.238.88:<0.13059.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.535,ns_1@10.242.238.88:<0.13059.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.536,ns_1@10.242.238.88:<0.11058.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 411 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.538,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.538,ns_1@10.242.238.88:<0.11066.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.539,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
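Editor's note: the janitor_agent "Doing vbucket N state change" records say which node a vbucket is being made active (or replica) on once the takeover finishes. A minimal sketch, assuming the tuple format shown above, that keeps the most recent active owner seen for each vbucket:

# Illustrative sketch: track which node each vbucket was last made active on,
# from records like:
#   Doing vbucket 671 state change: {'ns_1@10.242.238.90',active,undefined, undefined}
import re

STATE_CHANGE = re.compile(
    r"Doing vbucket (\d+) state change:\s*\{'([^']+)',(\w+),"
)

def last_active_node(text):
    """Return {vbucket: node} for the most recent 'active' transition in the log."""
    active = {}
    for vb, node, state in STATE_CHANGE.findall(text):
        if state == "active":
            active[int(vb)] = node
    return active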
[ns_server:debug,2014-08-19T16:50:51.539,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.539,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.539,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{664, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.543,ns_1@10.242.238.88:<0.11066.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_411_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.545,ns_1@10.242.238.88:<0.13076.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_922 [rebalance:info,2014-08-19T16:50:51.546,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 664 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.546,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 664) [rebalance:info,2014-08-19T16:50:51.547,ns_1@10.242.238.88:<0.13076.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[922]}, {checkpoints,[{922,1}]}, {name,<<"rebalance_922">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[922]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"922"}]} [ns_server:debug,2014-08-19T16:50:51.547,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.547,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 671 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.547,ns_1@10.242.238.88:<0.13253.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 671 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:51.547,ns_1@10.242.238.88:<0.13076.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13254.1> [rebalance:info,2014-08-19T16:50:51.548,ns_1@10.242.238.88:<0.13076.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.550,ns_1@10.242.238.88:<0.13076.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.550,ns_1@10.242.238.88:<0.13076.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.551,ns_1@10.242.238.88:<0.11002.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 922 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.552,ns_1@10.242.238.88:<0.11010.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:info,2014-08-19T16:50:51.555,ns_1@10.242.238.88:<0.11010.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_922_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.557,ns_1@10.242.238.88:<0.13082.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_424 [rebalance:info,2014-08-19T16:50:51.558,ns_1@10.242.238.88:<0.13082.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[424]}, {checkpoints,[{424,1}]}, {name,<<"rebalance_424">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[424]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"424"}]} [rebalance:debug,2014-08-19T16:50:51.559,ns_1@10.242.238.88:<0.13082.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13258.1> [rebalance:info,2014-08-19T16:50:51.560,ns_1@10.242.238.88:<0.13082.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.561,ns_1@10.242.238.88:<0.13082.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.562,ns_1@10.242.238.88:<0.13082.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.563,ns_1@10.242.238.88:<0.10041.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 424 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.563,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.564,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
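Editor's note: after each takeover the temporary replica-building taps are torn down, which shows up as "Killed the following tap names ..." records carrying names of the form replication_building_<vbucket>_'<peer node>'. A small sketch (again external to Couchbase, assuming only that naming pattern) to list them:

# Illustrative sketch: list the replica-building taps reported as killed, from
# records like:
#   Killed the following tap names on 'ns_1@10.242.238.88':
#   [<<"replication_building_662_'ns_1@10.242.238.89'">>]
import re

TAP_NAME = re.compile(r"replication_building_(\d+)_'([^']+)'")

def killed_building_taps(text):
    """Return (vbucket, peer_node) pairs for each tap name mentioned."""
    return [(int(vb), node) for vb, node in TAP_NAME.findall(text)]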
[ns_server:debug,2014-08-19T16:50:51.564,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.564,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.564,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{671, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.564,ns_1@10.242.238.88:<0.10049.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.567,ns_1@10.242.238.88:<0.10049.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_424_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.569,ns_1@10.242.238.88:<0.13055.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_419 [rebalance:info,2014-08-19T16:50:51.572,ns_1@10.242.238.88:<0.13055.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[419]}, {checkpoints,[{419,1}]}, {name,<<"rebalance_419">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[419]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"419"}]} [rebalance:debug,2014-08-19T16:50:51.572,ns_1@10.242.238.88:<0.13055.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13268.1> [rebalance:info,2014-08-19T16:50:51.573,ns_1@10.242.238.88:<0.13055.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.574,ns_1@10.242.238.88:<0.13055.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.575,ns_1@10.242.238.88:<0.13055.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.576,ns_1@10.242.238.88:<0.10446.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 419 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.579,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 671 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.580,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.581,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 675 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [ns_server:debug,2014-08-19T16:50:51.581,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 671) [rebalance:info,2014-08-19T16:50:51.581,ns_1@10.242.238.88:<0.13271.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 675 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:51.581,ns_1@10.242.238.88:<0.10454.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.583,ns_1@10.242.238.88:<0.13053.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_423 [ns_server:debug,2014-08-19T16:50:51.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 225. Nacking mccouch update. [views:debug,2014-08-19T16:50:51.587,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/225. Updated state: active (0) [ns_server:info,2014-08-19T16:50:51.587,ns_1@10.242.238.88:<0.10454.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_419_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",225,active,0} [rebalance:info,2014-08-19T16:50:51.588,ns_1@10.242.238.88:<0.13053.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[423]}, {checkpoints,[{423,1}]}, {name,<<"rebalance_423">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[423]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"423"}]} [rebalance:debug,2014-08-19T16:50:51.590,ns_1@10.242.238.88:<0.13053.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13275.1> [rebalance:info,2014-08-19T16:50:51.591,ns_1@10.242.238.88:<0.13053.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.591,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,823,512,457,146,691,380, 1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,690,562,507,379, 196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715, 587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740, 612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948, 
820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021, 944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882, 699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464, 698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569, 258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930, 619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435, 124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851, 540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356, 901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772, 717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222, 822,767,456] [rebalance:debug,2014-08-19T16:50:51.592,ns_1@10.242.238.88:<0.13053.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.592,ns_1@10.242.238.88:<0.13053.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.593,ns_1@10.242.238.88:<0.10104.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 423 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.597,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.597,ns_1@10.242.238.88:<0.10112.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
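Editor's note: the periodic "Usable vbuckets:" dump from capi_set_view_manager is a flat list of every vbucket this node still considers usable, out of the bucket's 1024 ({num_vbuckets,1024} in the config records). Comparing it against the full range shows which vbuckets have already been handed off. A minimal sketch, assuming the dump has been copied into a string:

# Illustrative sketch: which of the bucket's 1024 vbuckets are absent from a
# "Usable vbuckets:" dump like the one above.
import re

def missing_vbuckets(dump, num_vbuckets=1024):
    """`dump` is the comma-separated number list from a 'Usable vbuckets:' record."""
    usable = {int(n) for n in re.findall(r"\d+", dump)}
    return sorted(set(range(num_vbuckets)) - usable)

# Tiny placeholder excerpt, not the full list:
print(missing_vbuckets("933,622,311,856")[:10])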
[ns_server:debug,2014-08-19T16:50:51.597,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.597,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:51.598,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{675, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.598,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:51.600,ns_1@10.242.238.88:<0.10112.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_423_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.602,ns_1@10.242.238.88:<0.13077.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_924 [rebalance:info,2014-08-19T16:50:51.603,ns_1@10.242.238.88:<0.13077.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[924]}, {checkpoints,[{924,1}]}, {name,<<"rebalance_924">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[924]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"924"}]} [rebalance:debug,2014-08-19T16:50:51.604,ns_1@10.242.238.88:<0.13077.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13286.1> [rebalance:info,2014-08-19T16:50:51.604,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 675 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.605,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 675) [rebalance:info,2014-08-19T16:50:51.605,ns_1@10.242.238.88:<0.13077.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.606,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.606,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 674 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.606,ns_1@10.242.238.88:<0.13289.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 674 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:51.607,ns_1@10.242.238.88:<0.13077.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.607,ns_1@10.242.238.88:<0.13077.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.608,ns_1@10.242.238.88:<0.10848.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 924 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.609,ns_1@10.242.238.88:<0.10856.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.613,ns_1@10.242.238.88:<0.10856.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_924_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.615,ns_1@10.242.238.88:<0.13075.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_418 [rebalance:info,2014-08-19T16:50:51.616,ns_1@10.242.238.88:<0.13075.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[418]}, {checkpoints,[{418,1}]}, {name,<<"rebalance_418">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[418]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"418"}]} [rebalance:debug,2014-08-19T16:50:51.617,ns_1@10.242.238.88:<0.13075.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13292.1> [rebalance:info,2014-08-19T16:50:51.618,ns_1@10.242.238.88:<0.13075.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.619,ns_1@10.242.238.88:<0.13075.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.619,ns_1@10.242.238.88:<0.13075.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.620,ns_1@10.242.238.88:<0.10528.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 418 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.622,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.622,ns_1@10.242.238.88:<0.10536.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.622,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:50:51.622,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:51.623,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.623,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{674, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:50:51.625,ns_1@10.242.238.88:<0.10536.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_418_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.628,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 674 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.629,ns_1@10.242.238.88:<0.13041.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_409 [ns_server:debug,2014-08-19T16:50:51.642,ns_1@10.242.238.88:<0.13080.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_425 [ns_server:debug,2014-08-19T16:50:51.643,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 674) [ns_server:debug,2014-08-19T16:50:51.644,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.644,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 676 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:51.644,ns_1@10.242.238.88:<0.13306.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 676 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:50:51.648,ns_1@10.242.238.88:<0.13041.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[409]}, {checkpoints,[{409,1}]}, {name,<<"rebalance_409">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[409]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"409"}]} [rebalance:info,2014-08-19T16:50:51.648,ns_1@10.242.238.88:<0.13080.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[425]}, {checkpoints,[{425,1}]}, {name,<<"rebalance_425">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[425]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"425"}]} [rebalance:debug,2014-08-19T16:50:51.649,ns_1@10.242.238.88:<0.13041.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13307.1> [views:debug,2014-08-19T16:50:51.649,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/225. 
Updated state: active (0) [rebalance:debug,2014-08-19T16:50:51.649,ns_1@10.242.238.88:<0.13080.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13308.1> [ns_server:debug,2014-08-19T16:50:51.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",225,active,0} [rebalance:info,2014-08-19T16:50:51.650,ns_1@10.242.238.88:<0.13080.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.651,ns_1@10.242.238.88:<0.13041.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.652,ns_1@10.242.238.88:<0.13080.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.652,ns_1@10.242.238.88:<0.13080.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:50:51.653,ns_1@10.242.238.88:<0.13041.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.653,ns_1@10.242.238.88:<0.13041.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.654,ns_1@10.242.238.88:<0.9960.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 425 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.654,ns_1@10.242.238.88:<0.11216.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 409 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.660,ns_1@10.242.238.88:<0.13074.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_416 [ns_server:debug,2014-08-19T16:50:51.662,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.662,ns_1@10.242.238.88:<0.9968.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.662,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
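Editor's note: each "config change: buckets ->" record carries the bucket's static settings plus a one-entry {map,[...]} delta for the vbucket whose chain just changed, with the old chain (this node plus undefined) and the new chain it was handed to. A hedged sketch, based only on the deltas visible in this slice, that extracts those chains:

# Illustrative sketch: pull the per-vbucket map delta out of each
# "config change: buckets ->" record, e.g.
#   {map,[{675, ['ns_1@10.242.238.88',undefined],
#               ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}
import re

MAP_DELTA = re.compile(
    r"\{map,\[\{(\d+),\s*\[([^\]]*)\],\s*\[([^\]]*)\]\}\]\}",
    re.S,
)

def chain(atoms):
    """Turn "'ns_1@10.242.238.88',undefined" into a list of node names."""
    return [a.strip().strip("'") for a in atoms.split(",")]

def map_deltas(text):
    """Yield (vbucket, old_chain, new_chain) for each config-change record."""
    for vb, old, new in MAP_DELTA.findall(text):
        yield int(vb), chain(old), chain(new)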
[ns_server:debug,2014-08-19T16:50:51.662,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.663,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.663,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{676, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.663,ns_1@10.242.238.88:<0.11224.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.664,ns_1@10.242.238.88:<0.13074.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[416]}, {checkpoints,[{416,1}]}, {name,<<"rebalance_416">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[416]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"416"}]} [rebalance:debug,2014-08-19T16:50:51.665,ns_1@10.242.238.88:<0.13074.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13312.1> [ns_server:info,2014-08-19T16:50:51.665,ns_1@10.242.238.88:<0.9968.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_425_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.665,ns_1@10.242.238.88:<0.13074.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:51.667,ns_1@10.242.238.88:<0.11224.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_409_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:50:51.668,ns_1@10.242.238.88:<0.13074.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.668,ns_1@10.242.238.88:<0.13074.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.669,ns_1@10.242.238.88:<0.10668.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 416 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.672,ns_1@10.242.238.88:<0.13068.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_421 [rebalance:debug,2014-08-19T16:50:51.672,ns_1@10.242.238.88:<0.10676.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.673,ns_1@10.242.238.88:<0.13068.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[421]}, {checkpoints,[{421,1}]}, {name,<<"rebalance_421">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[421]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"421"}]} [rebalance:debug,2014-08-19T16:50:51.673,ns_1@10.242.238.88:<0.13068.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13321.1> 
[rebalance:info,2014-08-19T16:50:51.674,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 676 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:51.674,ns_1@10.242.238.88:<0.13068.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.674,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 676) [ns_server:debug,2014-08-19T16:50:51.675,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.675,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 668 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [ns_server:info,2014-08-19T16:50:51.675,ns_1@10.242.238.88:<0.10676.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_416_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.675,ns_1@10.242.238.88:<0.13326.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 668 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:50:51.676,ns_1@10.242.238.88:<0.13068.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.677,ns_1@10.242.238.88:<0.13068.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.677,ns_1@10.242.238.88:<0.10288.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 421 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.682,ns_1@10.242.238.88:<0.13087.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_422 [rebalance:info,2014-08-19T16:50:51.684,ns_1@10.242.238.88:<0.13087.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[422]}, {checkpoints,[{422,1}]}, {name,<<"rebalance_422">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[422]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"422"}]} [rebalance:debug,2014-08-19T16:50:51.685,ns_1@10.242.238.88:<0.13087.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13328.1> [rebalance:info,2014-08-19T16:50:51.685,ns_1@10.242.238.88:<0.13087.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.687,ns_1@10.242.238.88:<0.13087.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.687,ns_1@10.242.238.88:<0.13087.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.688,ns_1@10.242.238.88:<0.10186.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 422 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.691,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.691,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:51.692,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.692,ns_1@10.242.238.88:<0.10296.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.692,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{668, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.692,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.694,ns_1@10.242.238.88:<0.10197.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.695,ns_1@10.242.238.88:<0.10296.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_421_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.696,ns_1@10.242.238.88:<0.13078.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_928 [ns_server:info,2014-08-19T16:50:51.697,ns_1@10.242.238.88:<0.10197.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_422_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.700,ns_1@10.242.238.88:<0.13078.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[928]}, {checkpoints,[{928,1}]}, {name,<<"rebalance_928">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[928]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"928"}]} [rebalance:debug,2014-08-19T16:50:51.700,ns_1@10.242.238.88:<0.13078.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13340.1> [rebalance:info,2014-08-19T16:50:51.701,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 668 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:51.701,ns_1@10.242.238.88:<0.13078.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.702,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 668) [ns_server:debug,2014-08-19T16:50:51.702,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.702,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 926 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:51.702,ns_1@10.242.238.88:<0.13343.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 926 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:51.703,ns_1@10.242.238.88:<0.13078.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.703,ns_1@10.242.238.88:<0.13078.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.704,ns_1@10.242.238.88:<0.10549.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 928 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.705,ns_1@10.242.238.88:<0.10557.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.708,ns_1@10.242.238.88:<0.10557.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_928_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.717,ns_1@10.242.238.88:<0.13066.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_934 [rebalance:info,2014-08-19T16:50:51.719,ns_1@10.242.238.88:<0.13066.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[934]}, {checkpoints,[{934,1}]}, {name,<<"rebalance_934">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[934]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"934"}]} [rebalance:debug,2014-08-19T16:50:51.720,ns_1@10.242.238.88:<0.13066.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13347.1> [ns_server:debug,2014-08-19T16:50:51.721,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.721,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:51.721,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:51.721,ns_1@10.242.238.88:<0.13066.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.722,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.722,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{926, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:50:51.723,ns_1@10.242.238.88:<0.13066.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.724,ns_1@10.242.238.88:<0.13066.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.725,ns_1@10.242.238.88:<0.10062.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 934 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.726,ns_1@10.242.238.88:<0.13049.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_413 [rebalance:debug,2014-08-19T16:50:51.727,ns_1@10.242.238.88:<0.10070.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.727,ns_1@10.242.238.88:<0.13049.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[413]}, {checkpoints,[{413,1}]}, {name,<<"rebalance_413">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[413]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"413"}]} [rebalance:debug,2014-08-19T16:50:51.729,ns_1@10.242.238.88:<0.13049.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13355.1> [rebalance:info,2014-08-19T16:50:51.731,ns_1@10.242.238.88:<0.13049.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.732,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 926 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:51.733,ns_1@10.242.238.88:<0.13049.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.733,ns_1@10.242.238.88:<0.13049.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:51.733,ns_1@10.242.238.88:<0.10070.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_934_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.733,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 926) [rebalance:info,2014-08-19T16:50:51.734,ns_1@10.242.238.88:<0.10890.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 413 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.734,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.734,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 933 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:51.734,ns_1@10.242.238.88:<0.13366.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 933 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:50:51.735,ns_1@10.242.238.88:<0.10898.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.738,ns_1@10.242.238.88:<0.10898.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_413_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.743,ns_1@10.242.238.88:<0.13056.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_408 [rebalance:info,2014-08-19T16:50:51.744,ns_1@10.242.238.88:<0.13056.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[408]}, {checkpoints,[{408,1}]}, {name,<<"rebalance_408">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[408]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"408"}]} [rebalance:debug,2014-08-19T16:50:51.745,ns_1@10.242.238.88:<0.13056.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13378.1> [rebalance:info,2014-08-19T16:50:51.746,ns_1@10.242.238.88:<0.13056.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.747,ns_1@10.242.238.88:<0.13056.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.747,ns_1@10.242.238.88:<0.13056.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.748,ns_1@10.242.238.88:<0.11293.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 408 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.752,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.754,ns_1@10.242.238.88:<0.11301.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:50:51.754,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.754,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:51.755,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.755,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{933, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.755,ns_1@10.242.238.88:<0.13027.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_917 [rebalance:info,2014-08-19T16:50:51.757,ns_1@10.242.238.88:<0.13027.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[917]}, {checkpoints,[{917,1}]}, {name,<<"rebalance_917">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[917]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"917"}]} [rebalance:debug,2014-08-19T16:50:51.757,ns_1@10.242.238.88:<0.13027.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13386.1> [ns_server:info,2014-08-19T16:50:51.758,ns_1@10.242.238.88:<0.11301.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_408_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.759,ns_1@10.242.238.88:<0.13027.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.761,ns_1@10.242.238.88:<0.13027.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.761,ns_1@10.242.238.88:<0.13027.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.762,ns_1@10.242.238.88:<0.11391.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 917 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.764,ns_1@10.242.238.88:<0.11399.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.765,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 933 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.766,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 933) [ns_server:debug,2014-08-19T16:50:51.767,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.767,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 410 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:51.767,ns_1@10.242.238.88:<0.13392.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 410 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:50:51.767,ns_1@10.242.238.88:<0.11399.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_917_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.769,ns_1@10.242.238.88:<0.13050.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_680 [rebalance:info,2014-08-19T16:50:51.771,ns_1@10.242.238.88:<0.13050.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[680]}, {checkpoints,[{680,1}]}, {name,<<"rebalance_680">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[680]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"680"}]} [rebalance:debug,2014-08-19T16:50:51.771,ns_1@10.242.238.88:<0.13050.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13394.1> [rebalance:info,2014-08-19T16:50:51.773,ns_1@10.242.238.88:<0.13050.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.775,ns_1@10.242.238.88:<0.13050.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.775,ns_1@10.242.238.88:<0.13050.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.778,ns_1@10.242.238.88:<0.10006.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 680 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.780,ns_1@10.242.238.88:<0.13058.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_412 [rebalance:info,2014-08-19T16:50:51.781,ns_1@10.242.238.88:<0.13058.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[412]}, {checkpoints,[{412,1}]}, {name,<<"rebalance_412">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[412]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"412"}]} [rebalance:debug,2014-08-19T16:50:51.783,ns_1@10.242.238.88:<0.13058.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13396.1> [rebalance:info,2014-08-19T16:50:51.784,ns_1@10.242.238.88:<0.13058.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.786,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.786,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.786,ns_1@10.242.238.88:<0.13058.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:debug,2014-08-19T16:50:51.786,ns_1@10.242.238.88:<0.10014.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.786,ns_1@10.242.238.88:<0.13058.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.787,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:51.787,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{410, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.787,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:51.787,ns_1@10.242.238.88:<0.10967.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 412 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.789,ns_1@10.242.238.88:<0.10975.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.790,ns_1@10.242.238.88:<0.10014.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_680_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.794,ns_1@10.242.238.88:<0.10975.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_412_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.795,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 410 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.799,ns_1@10.242.238.88:<0.13039.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_679 [ns_server:debug,2014-08-19T16:50:51.809,ns_1@10.242.238.88:<0.13048.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_406 [ns_server:debug,2014-08-19T16:50:51.812,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 410) [rebalance:info,2014-08-19T16:50:51.812,ns_1@10.242.238.88:<0.13048.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[406]}, {checkpoints,[{406,1}]}, {name,<<"rebalance_406">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[406]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"406"}]} [rebalance:info,2014-08-19T16:50:51.813,ns_1@10.242.238.88:<0.13039.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[679]}, {checkpoints,[{679,1}]}, {name,<<"rebalance_679">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[679]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"679"}]} [ns_server:debug,2014-08-19T16:50:51.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 223. Nacking mccouch update. [views:debug,2014-08-19T16:50:51.813,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/223. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:51.813,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:50:51.813,ns_1@10.242.238.88:<0.13048.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13410.1> [ns_server:debug,2014-08-19T16:50:51.813,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",223,active,0} [rebalance:info,2014-08-19T16:50:51.813,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 414 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:51.814,ns_1@10.242.238.88:<0.13412.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 414 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:51.814,ns_1@10.242.238.88:<0.13039.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13413.1> [rebalance:info,2014-08-19T16:50:51.814,ns_1@10.242.238.88:<0.13048.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.814,ns_1@10.242.238.88:<0.13039.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.816,ns_1@10.242.238.88:<0.13039.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.816,ns_1@10.242.238.88:<0.13039.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.816,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 
543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,690,562,507, 379,196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770, 715,587,404,276,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795, 740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220, 948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300, 245,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636,453, 325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661, 533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686, 558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894, 711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216, 1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632, 449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840, 657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162, 890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370, 915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267, 212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603, 420,292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154, 882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519, 464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880, 569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007, 930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746, 435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251, 851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667, 356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172, 772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277, 222,822,767,456] [rebalance:debug,2014-08-19T16:50:51.817,ns_1@10.242.238.88:<0.13048.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:info,2014-08-19T16:50:51.817,ns_1@10.242.238.88:<0.13048.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.817,ns_1@10.242.238.88:<0.10083.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 679 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.818,ns_1@10.242.238.88:<0.11433.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 406 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.819,ns_1@10.242.238.88:<0.11441.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.823,ns_1@10.242.238.88:<0.11441.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_406_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:50:51.824,ns_1@10.242.238.88:<0.13071.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_935 [rebalance:info,2014-08-19T16:50:51.825,ns_1@10.242.238.88:<0.13071.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[935]}, {checkpoints,[{935,1}]}, {name,<<"rebalance_935">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[935]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"935"}]} [rebalance:debug,2014-08-19T16:50:51.826,ns_1@10.242.238.88:<0.13071.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13416.1> [rebalance:info,2014-08-19T16:50:51.827,ns_1@10.242.238.88:<0.13071.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.827,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.827,ns_1@10.242.238.88:<0.10091.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.828,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.828,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:51.828,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{414, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.828,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.829,ns_1@10.242.238.88:<0.13071.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.829,ns_1@10.242.238.88:<0.13071.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.830,ns_1@10.242.238.88:<0.9981.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 935 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:51.831,ns_1@10.242.238.88:<0.10091.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_679_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:50:51.832,ns_1@10.242.238.88:<0.9989.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.835,ns_1@10.242.238.88:<0.9989.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_935_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.837,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 414 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.838,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 414) [ns_server:debug,2014-08-19T16:50:51.839,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.839,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 407 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:51.839,ns_1@10.242.238.88:<0.13430.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 407 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:51.840,ns_1@10.242.238.88:<0.13057.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_927 [rebalance:info,2014-08-19T16:50:51.843,ns_1@10.242.238.88:<0.13057.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[927]}, {checkpoints,[{927,1}]}, {name,<<"rebalance_927">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[927]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"927"}]} [rebalance:debug,2014-08-19T16:50:51.844,ns_1@10.242.238.88:<0.13057.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13431.1> [rebalance:info,2014-08-19T16:50:51.845,ns_1@10.242.238.88:<0.13057.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.847,ns_1@10.242.238.88:<0.13057.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.847,ns_1@10.242.238.88:<0.13057.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.852,ns_1@10.242.238.88:<0.10626.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 927 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.853,ns_1@10.242.238.88:<0.13043.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_925 [rebalance:debug,2014-08-19T16:50:51.854,ns_1@10.242.238.88:<0.10634.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.854,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.856,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:info,2014-08-19T16:50:51.856,ns_1@10.242.238.88:<0.13043.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[925]}, {checkpoints,[{925,1}]}, {name,<<"rebalance_925">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[925]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"925"}]} [ns_server:debug,2014-08-19T16:50:51.856,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.856,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{407, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.856,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.857,ns_1@10.242.238.88:<0.13043.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13435.1> [rebalance:info,2014-08-19T16:50:51.858,ns_1@10.242.238.88:<0.13043.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:51.858,ns_1@10.242.238.88:<0.10634.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_927_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:50:51.861,ns_1@10.242.238.88:<0.13043.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.861,ns_1@10.242.238.88:<0.13043.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.862,ns_1@10.242.238.88:<0.10766.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 925 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:51.862,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 407 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.863,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 407) [ns_server:debug,2014-08-19T16:50:51.863,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.863,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 415 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:51.863,ns_1@10.242.238.88:<0.13445.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 415 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [views:debug,2014-08-19T16:50:51.864,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/223. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:51.864,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",223,active,0} [rebalance:debug,2014-08-19T16:50:51.864,ns_1@10.242.238.88:<0.10774.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:50:51.868,ns_1@10.242.238.88:<0.10774.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_925_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.868,ns_1@10.242.238.88:<0.13040.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_918 [rebalance:info,2014-08-19T16:50:51.870,ns_1@10.242.238.88:<0.13040.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[918]}, {checkpoints,[{918,1}]}, {name,<<"rebalance_918">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[918]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"918"}]} [rebalance:debug,2014-08-19T16:50:51.871,ns_1@10.242.238.88:<0.13040.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13448.1> [rebalance:info,2014-08-19T16:50:51.872,ns_1@10.242.238.88:<0.13040.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.873,ns_1@10.242.238.88:<0.13040.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.873,ns_1@10.242.238.88:<0.13040.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.874,ns_1@10.242.238.88:<0.11314.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 918 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.876,ns_1@10.242.238.88:<0.11322.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.877,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.878,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{415, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.878,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.879,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:51.880,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.880,ns_1@10.242.238.88:<0.13045.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_920 [ns_server:info,2014-08-19T16:50:51.881,ns_1@10.242.238.88:<0.11322.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_918_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.881,ns_1@10.242.238.88:<0.13045.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[920]}, {checkpoints,[{920,1}]}, {name,<<"rebalance_920">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[920]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"920"}]} [rebalance:debug,2014-08-19T16:50:51.883,ns_1@10.242.238.88:<0.13045.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13459.1> [rebalance:info,2014-08-19T16:50:51.883,ns_1@10.242.238.88:<0.13045.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.885,ns_1@10.242.238.88:<0.13045.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.885,ns_1@10.242.238.88:<0.13045.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.886,ns_1@10.242.238.88:<0.11162.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 920 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.888,ns_1@10.242.238.88:<0.11176.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.889,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 415 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.891,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 415) [ns_server:debug,2014-08-19T16:50:51.891,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.891,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 930 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [ns_server:info,2014-08-19T16:50:51.891,ns_1@10.242.238.88:<0.11176.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_920_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.891,ns_1@10.242.238.88:<0.13464.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 930 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:51.894,ns_1@10.242.238.88:<0.13035.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_681 [rebalance:info,2014-08-19T16:50:51.895,ns_1@10.242.238.88:<0.13035.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[681]}, {checkpoints,[{681,1}]}, {name,<<"rebalance_681">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[681]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"681"}]} [rebalance:debug,2014-08-19T16:50:51.896,ns_1@10.242.238.88:<0.13035.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13465.1> [rebalance:info,2014-08-19T16:50:51.897,ns_1@10.242.238.88:<0.13035.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.899,ns_1@10.242.238.88:<0.13035.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.899,ns_1@10.242.238.88:<0.13035.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.900,ns_1@10.242.238.88:<0.9939.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 681 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.901,ns_1@10.242.238.88:<0.9947.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.908,ns_1@10.242.238.88:<0.13091.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_420 [ns_server:info,2014-08-19T16:50:51.909,ns_1@10.242.238.88:<0.9947.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_681_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.911,ns_1@10.242.238.88:<0.13091.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[420]}, {checkpoints,[{420,1}]}, {name,<<"rebalance_420">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[420]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"420"}]} [rebalance:debug,2014-08-19T16:50:51.912,ns_1@10.242.238.88:<0.13091.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13468.1> [rebalance:info,2014-08-19T16:50:51.912,ns_1@10.242.238.88:<0.13091.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:50:51.914,ns_1@10.242.238.88:<0.13091.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.914,ns_1@10.242.238.88:<0.13091.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.915,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.915,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:51.915,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{930, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:51.916,ns_1@10.242.238.88:<0.10365.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 420 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.917,ns_1@10.242.238.88:<0.10373.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:51.921,ns_1@10.242.238.88:<0.10373.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_420_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:50:51.923,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 930 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.924,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 930) [ns_server:debug,2014-08-19T16:50:51.925,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.925,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 411 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:51.925,ns_1@10.242.238.88:<0.13489.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 411 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:51.925,ns_1@10.242.238.88:<0.13031.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_673 [rebalance:info,2014-08-19T16:50:51.926,ns_1@10.242.238.88:<0.13031.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[673]}, {checkpoints,[{673,1}]}, {name,<<"rebalance_673">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[673]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"673"}]} [rebalance:debug,2014-08-19T16:50:51.927,ns_1@10.242.238.88:<0.13031.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13494.1> [rebalance:info,2014-08-19T16:50:51.928,ns_1@10.242.238.88:<0.13031.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.931,ns_1@10.242.238.88:<0.13031.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.931,ns_1@10.242.238.88:<0.13031.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.932,ns_1@10.242.238.88:<0.10570.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 673 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:51.937,ns_1@10.242.238.88:<0.13051.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_929 [rebalance:info,2014-08-19T16:50:51.938,ns_1@10.242.238.88:<0.13051.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[929]}, {checkpoints,[{929,1}]}, {name,<<"rebalance_929">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[929]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"929"}]} [ns_server:debug,2014-08-19T16:50:51.939,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.939,ns_1@10.242.238.88:<0.10578.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:51.940,ns_1@10.242.238.88:<0.13051.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13499.1> [ns_server:debug,2014-08-19T16:50:51.940,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:51.940,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{411, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.940,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:51.940,ns_1@10.242.238.88:<0.13051.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:51.943,ns_1@10.242.238.88:<0.10578.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_673_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:51.950,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 411 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.952,ns_1@10.242.238.88:<0.13032.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_921 [ns_server:debug,2014-08-19T16:50:51.964,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 411) [ns_server:debug,2014-08-19T16:50:51.965,ns_1@10.242.238.88:<0.13060.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_417 [rebalance:debug,2014-08-19T16:50:51.965,ns_1@10.242.238.88:<0.13051.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.966,ns_1@10.242.238.88:<0.13051.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:50:51.966,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:51.966,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 922 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:51.966,ns_1@10.242.238.88:<0.13510.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 922 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:50:51.966,ns_1@10.242.238.88:<0.13032.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[921]}, {checkpoints,[{921,1}]}, {name,<<"rebalance_921">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[921]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"921"}]} [rebalance:info,2014-08-19T16:50:51.967,ns_1@10.242.238.88:<0.13060.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[417]}, {checkpoints,[{417,1}]}, {name,<<"rebalance_417">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[417]}, {set_to_pending_state,true}, 
{takeover,true}, {suffix,"417"}]} [rebalance:info,2014-08-19T16:50:51.967,ns_1@10.242.238.88:<0.10468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 929 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.967,ns_1@10.242.238.88:<0.13032.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13511.1> [rebalance:debug,2014-08-19T16:50:51.967,ns_1@10.242.238.88:<0.13060.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13512.1> [rebalance:info,2014-08-19T16:50:51.968,ns_1@10.242.238.88:<0.13032.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:50:51.968,ns_1@10.242.238.88:<0.13060.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.969,ns_1@10.242.238.88:<0.10476.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:50:51.969,ns_1@10.242.238.88:<0.13032.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.969,ns_1@10.242.238.88:<0.13032.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.971,ns_1@10.242.238.88:<0.11083.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 921 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.972,ns_1@10.242.238.88:<0.13060.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.972,ns_1@10.242.238.88:<0.13060.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:50:51.972,ns_1@10.242.238.88:<0.11091.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:51.973,ns_1@10.242.238.88:<0.10605.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 417 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:info,2014-08-19T16:50:51.975,ns_1@10.242.238.88:<0.10476.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_929_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:50:51.976,ns_1@10.242.238.88:<0.11091.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_921_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.979,ns_1@10.242.238.88:<0.13030.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_677 [rebalance:info,2014-08-19T16:50:51.981,ns_1@10.242.238.88:<0.13030.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[677]}, {checkpoints,[{677,1}]}, {name,<<"rebalance_677">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[677]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"677"}]} [rebalance:debug,2014-08-19T16:50:51.981,ns_1@10.242.238.88:<0.13030.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13517.1> [rebalance:info,2014-08-19T16:50:51.982,ns_1@10.242.238.88:<0.13030.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:51.984,ns_1@10.242.238.88:<0.13030.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.984,ns_1@10.242.238.88:<0.13030.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for 
successfull takover [rebalance:info,2014-08-19T16:50:51.985,ns_1@10.242.238.88:<0.10261.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 677 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:51.987,ns_1@10.242.238.88:<0.10269.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.987,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:51.987,ns_1@10.242.238.88:<0.10613.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:51.989,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:51.989,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{922, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:51.990,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:51.990,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:51.991,ns_1@10.242.238.88:<0.10613.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_417_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:50:51.991,ns_1@10.242.238.88:<0.10269.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_677_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:51.994,ns_1@10.242.238.88:<0.13037.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_923 [rebalance:info,2014-08-19T16:50:51.995,ns_1@10.242.238.88:<0.13037.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[923]}, {checkpoints,[{923,1}]}, {name,<<"rebalance_923">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[923]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"923"}]} [rebalance:debug,2014-08-19T16:50:51.996,ns_1@10.242.238.88:<0.13037.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13530.1> [rebalance:info,2014-08-19T16:50:51.997,ns_1@10.242.238.88:<0.13037.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:50:51.997,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 221. Nacking mccouch update. [views:debug,2014-08-19T16:50:51.997,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/221. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:51.998,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",221,active,0} [rebalance:info,2014-08-19T16:50:51.998,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 922 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:51.998,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 922) [ns_server:debug,2014-08-19T16:50:51.999,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:50:51.999,ns_1@10.242.238.88:<0.13037.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:51.999,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 424 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:51.999,ns_1@10.242.238.88:<0.13037.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:51.999,ns_1@10.242.238.88:<0.13533.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 424 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.000,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,690,562,507, 379,196,1001,924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770, 715,587,404,276,221,949,821,766,638,455,327,144,872,689,561,506,378,1000,923, 795,740,612,429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275, 220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428, 300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,947,819,764,636, 453,325,142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844, 661,533,478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869, 686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166, 894,711,583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 919,791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271, 216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607, 424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760, 632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968, 840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 
603,420,292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811, 756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830, 519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335, 880,569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385, 1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801, 746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306, 251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978, 667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483, 172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588, 277,222,822,767,456] [rebalance:info,2014-08-19T16:50:52.001,ns_1@10.242.238.88:<0.10925.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 923 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:52.003,ns_1@10.242.238.88:<0.10933.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:52.006,ns_1@10.242.238.88:<0.13044.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_916 [ns_server:info,2014-08-19T16:50:52.006,ns_1@10.242.238.88:<0.10933.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_923_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:52.009,ns_1@10.242.238.88:<0.13044.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[916]}, {checkpoints,[{916,1}]}, {name,<<"rebalance_916">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[916]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"916"}]} [rebalance:debug,2014-08-19T16:50:52.010,ns_1@10.242.238.88:<0.13044.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13536.1> [rebalance:info,2014-08-19T16:50:52.011,ns_1@10.242.238.88:<0.13044.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:52.012,ns_1@10.242.238.88:<0.13044.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.013,ns_1@10.242.238.88:<0.13044.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:52.014,ns_1@10.242.238.88:<0.11468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 916 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:52.015,ns_1@10.242.238.88:<0.11476.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
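
Annotation: the "Usable vbuckets" dump from capi_set_view_manager above is an unordered list of vbucket ids. With num_vbuckets set to 1024 (as the bucket config entries in this log show), a quick way to see which ids are currently absent is a set difference; a sketch, assuming the logged list has been bound to a variable:

-module(usable_vb_note).
-export([missing/1]).

%% Returns the vbucket ids in 0..1023 that do not appear in the logged
%% "Usable vbuckets" list. The 1024 total matches num_vbuckets in the
%% bucket config entries above.
missing(Usable) ->
    Known = ordsets:from_list(Usable),
    [VB || VB <- lists:seq(0, 1023), not ordsets:is_element(VB, Known)].
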
[ns_server:debug,2014-08-19T16:50:52.018,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:52.019,ns_1@10.242.238.88:<0.11476.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_916_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:52.019,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.019,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.019,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{424, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.020,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.021,ns_1@10.242.238.88:<0.13029.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_670 [rebalance:info,2014-08-19T16:50:52.026,ns_1@10.242.238.88:<0.13029.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[670]}, {checkpoints,[{670,1}]}, {name,<<"rebalance_670">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[670]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"670"}]} [rebalance:info,2014-08-19T16:50:52.026,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 424 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:50:52.026,ns_1@10.242.238.88:<0.13029.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13548.1> [ns_server:debug,2014-08-19T16:50:52.027,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 424) [ns_server:debug,2014-08-19T16:50:52.027,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.027,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 419 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.027,ns_1@10.242.238.88:<0.13550.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 419 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:50:52.028,ns_1@10.242.238.88:<0.13029.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:52.030,ns_1@10.242.238.88:<0.13029.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.030,ns_1@10.242.238.88:<0.13029.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:52.031,ns_1@10.242.238.88:<0.10800.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 670 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:52.040,ns_1@10.242.238.88:<0.10814.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:52.041,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.041,ns_1@10.242.238.88:<0.13092.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_932 [ns_server:debug,2014-08-19T16:50:52.042,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{419, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.042,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
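
Annotation: every "config change: buckets -> ..." entry above repeats the full bucket proplist plus a one-element map. Reading that map element as {VBucket, ChainBefore, ChainAfter} is consistent with the "Moving vbucket N done" message preceding each entry, but it is an inference from this log, not a documented format. A sketch that extracts the moved vbucket ids under that assumption:

-module(bucket_change_note).
-export([moved_vbuckets/1]).

%% Walks the term logged after "config change: buckets ->" and returns, per
%% bucket, the vbucket ids mentioned in its (partial) map. The three-element
%% map entries are read as {VBucket, ChainBefore, ChainAfter}; that reading is
%% an assumption based on the surrounding rebalance messages.
moved_vbuckets([{configs, Buckets}]) ->
    [{Name, [VB || {VB, _Before, _After}
                       <- proplists:get_value(map, Props, [])]}
     || {Name, Props} <- Buckets].
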
[ns_server:debug,2014-08-19T16:50:52.042,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.042,ns_1@10.242.238.88:<0.13092.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[932]}, {checkpoints,[{932,1}]}, {name,<<"rebalance_932">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[932]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"932"}]} [rebalance:debug,2014-08-19T16:50:52.043,ns_1@10.242.238.88:<0.13092.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13553.1> [ns_server:debug,2014-08-19T16:50:52.043,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.045,ns_1@10.242.238.88:<0.13092.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:52.046,ns_1@10.242.238.88:<0.10814.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_670_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:50:52.047,ns_1@10.242.238.88:<0.13092.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.048,ns_1@10.242.238.88:<0.13092.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:52.049,ns_1@10.242.238.88:<0.10216.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 932 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:52.049,ns_1@10.242.238.88:<0.13034.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_672 [rebalance:info,2014-08-19T16:50:52.051,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 419 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:50:52.051,ns_1@10.242.238.88:<0.13034.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[672]}, {checkpoints,[{672,1}]}, {name,<<"rebalance_672">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[672]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"672"}]} [rebalance:debug,2014-08-19T16:50:52.051,ns_1@10.242.238.88:<0.13034.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13562.1> [ns_server:debug,2014-08-19T16:50:52.051,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 419) [rebalance:info,2014-08-19T16:50:52.052,ns_1@10.242.238.88:<0.13034.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:52.052,ns_1@10.242.238.88:<0.10224.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:52.052,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.052,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 423 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.052,ns_1@10.242.238.88:<0.13564.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 423 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:50:52.054,ns_1@10.242.238.88:<0.13034.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.054,ns_1@10.242.238.88:<0.13034.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:50:52.056,ns_1@10.242.238.88:<0.10224.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_932_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:52.059,ns_1@10.242.238.88:<0.10647.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 672 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:50:52.065,ns_1@10.242.238.88:<0.13033.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_666 [rebalance:info,2014-08-19T16:50:52.067,ns_1@10.242.238.88:<0.13033.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[666]}, {checkpoints,[{666,1}]}, {name,<<"rebalance_666">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[666]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"666"}]} [rebalance:debug,2014-08-19T16:50:52.068,ns_1@10.242.238.88:<0.13033.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13568.1> [rebalance:info,2014-08-19T16:50:52.069,ns_1@10.242.238.88:<0.13033.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:52.070,ns_1@10.242.238.88:<0.13033.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.071,ns_1@10.242.238.88:<0.13033.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:52.072,ns_1@10.242.238.88:<0.11118.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 666 state change: {'ns_1@10.242.238.90',active,undefined, undefined} 
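
Annotation: the janitor_agent "Doing vbucket N state change" entries all carry a 4-tuple such as {'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'} or {'ns_1@10.242.238.91',active,undefined,undefined}. One reading consistent with the values in this log is {Node, NewState, RebalanceInfo, ReplicateFrom}; the sketch below prints that reading, with field names that are guesses rather than the janitor_agent API.

-module(vb_state_note).
-export([explain/2]).

%% Prints one reading of the 4-tuple logged by "Doing vbucket N state change".
%% {Node, NewState, _RebalanceInfo, ReplicateFrom} is an interpretation of the
%% values seen in this log, not a documented record.
explain(VBucket, {Node, NewState, _RebalanceInfo, ReplicateFrom}) ->
    From = case ReplicateFrom of
               undefined -> "no replication source";
               Src       -> atom_to_list(Src)
           end,
    io:format("vb ~p on ~p -> ~p (~s)~n", [VBucket, Node, NewState, From]).

%% Example, using the vbucket 666 entry above:
%% vb_state_note:explain(666, {'ns_1@10.242.238.90',active,undefined,undefined}).
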
[views:debug,2014-08-19T16:50:52.073,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/221. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.073,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",221,active,0} [ns_server:debug,2014-08-19T16:50:52.076,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:52.077,ns_1@10.242.238.88:<0.10655.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:52.077,ns_1@10.242.238.88:<0.13079.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_931 [ns_server:debug,2014-08-19T16:50:52.077,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.078,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.078,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{423, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.078,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:50:52.078,ns_1@10.242.238.88:<0.11126.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:52.079,ns_1@10.242.238.88:<0.13079.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[931]}, {checkpoints,[{931,1}]}, {name,<<"rebalance_931">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[931]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"931"}]} [rebalance:debug,2014-08-19T16:50:52.079,ns_1@10.242.238.88:<0.13079.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13576.1> [ns_server:info,2014-08-19T16:50:52.082,ns_1@10.242.238.88:<0.10655.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_672_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:50:52.082,ns_1@10.242.238.88:<0.13079.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:50:52.083,ns_1@10.242.238.88:<0.11126.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_666_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:50:52.085,ns_1@10.242.238.88:<0.13079.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.085,ns_1@10.242.238.88:<0.13079.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:50:52.086,ns_1@10.242.238.88:<0.10309.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 931 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:50:52.086,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 423 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.087,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 423) [ns_server:debug,2014-08-19T16:50:52.088,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.088,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 924 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:debug,2014-08-19T16:50:52.088,ns_1@10.242.238.88:<0.10317.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:50:52.088,ns_1@10.242.238.88:<0.13583.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 924 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:50:52.091,ns_1@10.242.238.88:<0.10317.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_931_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:52.097,ns_1@10.242.238.88:<0.13038.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_919 [rebalance:info,2014-08-19T16:50:52.099,ns_1@10.242.238.88:<0.13038.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[919]}, {checkpoints,[{919,1}]}, {name,<<"rebalance_919">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[919]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"919"}]} [rebalance:debug,2014-08-19T16:50:52.100,ns_1@10.242.238.88:<0.13038.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13587.1> [rebalance:info,2014-08-19T16:50:52.100,ns_1@10.242.238.88:<0.13038.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:52.103,ns_1@10.242.238.88:<0.13038.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.103,ns_1@10.242.238.88:<0.13038.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:52.104,ns_1@10.242.238.88:<0.11251.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 919 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:52.105,ns_1@10.242.238.88:<0.11259.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:52.106,ns_1@10.242.238.88:<0.13042.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_678 [rebalance:info,2014-08-19T16:50:52.108,ns_1@10.242.238.88:<0.13042.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[678]}, {checkpoints,[{678,1}]}, {name,<<"rebalance_678">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[678]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"678"}]} [ns_server:info,2014-08-19T16:50:52.108,ns_1@10.242.238.88:<0.11259.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 
'ns_1@10.242.238.88': [<<"replication_building_919_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:50:52.109,ns_1@10.242.238.88:<0.13042.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.13590.1> [rebalance:info,2014-08-19T16:50:52.110,ns_1@10.242.238.88:<0.13042.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:50:52.111,ns_1@10.242.238.88:<0.13042.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:50:52.111,ns_1@10.242.238.88:<0.13042.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:50:52.112,ns_1@10.242.238.88:<0.10160.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 678 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:50:52.114,ns_1@10.242.238.88:<0.10168.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:50:52.117,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.117,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:50:52.117,ns_1@10.242.238.88:<0.10168.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_678_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:50:52.118,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.118,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{924, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.119,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.124,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 924 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.125,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 924) [ns_server:debug,2014-08-19T16:50:52.125,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.126,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 418 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.126,ns_1@10.242.238.88:<0.13602.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 418 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.144,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.145,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.145,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{418, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.145,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.155,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 418 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.155,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 418) [ns_server:debug,2014-08-19T16:50:52.156,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.156,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 425 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.156,ns_1@10.242.238.88:<0.13627.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 425 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.169,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.170,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:52.170,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{425, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.170,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.171,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.190,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 219. Nacking mccouch update. [views:debug,2014-08-19T16:50:52.190,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/219. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.190,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",219,active,0} [ns_server:debug,2014-08-19T16:50:52.192,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715,587,404, 276,221,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973, 845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021, 944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890, 707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915, 
787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882, 699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464, 698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569, 258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930, 619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746,435, 124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251,851, 540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667,356, 901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172,772, 717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277,222, 822,767,456,690,379,1001] [rebalance:info,2014-08-19T16:50:52.196,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 425 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.197,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 425) [ns_server:debug,2014-08-19T16:50:52.197,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.197,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 409 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.197,ns_1@10.242.238.88:<0.13638.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 409 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.214,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.215,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{409, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.215,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.215,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.215,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.222,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 409 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.223,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 409) [ns_server:debug,2014-08-19T16:50:52.223,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.223,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 416 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.224,ns_1@10.242.238.88:<0.13649.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 416 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.241,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.242,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{416, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.242,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.242,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.242,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.255,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 416 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.256,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 416) [ns_server:debug,2014-08-19T16:50:52.257,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.257,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 421 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.257,ns_1@10.242.238.88:<0.13660.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 421 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [views:debug,2014-08-19T16:50:52.257,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/219. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.257,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",219,active,0} [ns_server:debug,2014-08-19T16:50:52.270,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.271,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.271,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.271,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{421, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.271,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.279,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 421 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.280,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 421) [ns_server:debug,2014-08-19T16:50:52.280,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.280,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 422 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.281,ns_1@10.242.238.88:<0.13671.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 422 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.296,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.297,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
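
Annotation: the mc_connection / mc_couch_events pairs above ("Added _local/vbuuid document into vb: 219. Nacking mccouch update." followed by "Signaled mc_couch_event: {set_vbucket,\"default\",219,active,0}") show the event term handed to capi_set_view_manager. A sketch that matches that tuple as logged; the trailing integer is passed through as-is, since this log only mirrors it as "(0)" in the corresponding views:debug line.

-module(mc_event_note).
-export([on_event/1]).

%% Matches the {set_vbucket, Bucket, VBucket, State, N} tuple echoed in the
%% "Signaled mc_couch_event" entries. N is reproduced unchanged; its meaning
%% is not established by this log.
on_event({set_vbucket, Bucket, VBucket, State, N}) ->
    io:format("bucket ~s: vb ~p is now ~p (~p)~n", [Bucket, VBucket, State, N]);
on_event(Other) ->
    io:format("unhandled event: ~p~n", [Other]).
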
[ns_server:debug,2014-08-19T16:50:52.297,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.297,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{422, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.297,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.306,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 422 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.307,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 422) [ns_server:debug,2014-08-19T16:50:52.307,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.307,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 928 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.308,ns_1@10.242.238.88:<0.13682.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 928 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.330,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.330,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.331,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.331,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{928, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.331,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.341,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 928 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.342,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 928) [ns_server:debug,2014-08-19T16:50:52.342,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.342,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 934 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.343,ns_1@10.242.238.88:<0.13707.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 934 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.360,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.360,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.361,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{934, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.361,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.361,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.367,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 934 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.368,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 934) [ns_server:debug,2014-08-19T16:50:52.369,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.369,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 413 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.369,ns_1@10.242.238.88:<0.13718.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 413 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.388,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.388,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
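
Annotation: the rebalance trail above alternates "Moving vbucket N done" (ns_vbucket_mover) with "Noted vbucket move end (vbucket N)" (ns_rebalance_observer), each stamped to the millisecond. A small sketch for pulling the move-end events out of a raw chunk of this log; the regular expression is only an assumption about the line layout seen here, not an ns_server facility.

-module(move_end_note).
-export([move_ends/1]).

%% Scans raw log text for "Noted vbucket move end (vbucket N)" entries and
%% returns [{VBucket, TimestampString}]. The pattern mirrors the layout of
%% the entries in this log; it is not derived from ns_server itself.
move_ends(LogText) ->
    {ok, RE} = re:compile("\\[ns_server:debug,([0-9T:.-]+),[^]]*\\]"
                          "Noted vbucket move end \\(vbucket ([0-9]+)\\)"),
    case re:run(LogText, RE, [global, {capture, all_but_first, list}]) of
        {match, Hits} -> [{list_to_integer(VB), TS} || [TS, VB] <- Hits];
        nomatch       -> []
    end.
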
[ns_server:debug,2014-08-19T16:50:52.388,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.389,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.389,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{413, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:52.397,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 413 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.397,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 217. Nacking mccouch update. [views:debug,2014-08-19T16:50:52.397,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/217. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.397,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",217,active,0} [ns_server:debug,2014-08-19T16:50:52.398,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 413) [ns_server:debug,2014-08-19T16:50:52.398,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.398,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 408 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.398,ns_1@10.242.238.88:<0.13729.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 408 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.400,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715,587,404, 276,221,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973, 
845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216, 1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,969,841,658,530,475,347,164,892,709,581,398,270,1020,943,815,760,632, 449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840, 657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162, 890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498,370, 915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267, 212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603, 420,292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341, 158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154, 882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519, 464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880, 569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007, 930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746, 435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251, 851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667, 356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172, 772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277, 222,822,767,456,690,379,1001] [ns_server:debug,2014-08-19T16:50:52.424,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.424,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.425,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:52.425,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.425,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{408, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:52.431,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 408 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.432,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 408) [ns_server:debug,2014-08-19T16:50:52.433,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.433,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 917 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.433,ns_1@10.242.238.88:<0.13740.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 917 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.450,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.451,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.451,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.451,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.451,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{917, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:52.460,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 917 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.461,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 917) [ns_server:debug,2014-08-19T16:50:52.461,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.461,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 680 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:52.461,ns_1@10.242.238.88:<0.13751.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 680 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:50:52.464,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/217. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.465,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",217,active,0} [ns_server:debug,2014-08-19T16:50:52.477,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.478,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.479,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.478,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{680, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.479,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.485,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 680 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.485,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 680) [ns_server:debug,2014-08-19T16:50:52.486,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.486,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 412 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.486,ns_1@10.242.238.88:<0.13761.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 412 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.506,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.507,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.507,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.507,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{412, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.507,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.516,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 412 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.517,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 412) [ns_server:debug,2014-08-19T16:50:52.518,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.518,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 406 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.518,ns_1@10.242.238.88:<0.13772.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 406 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.534,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.535,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:52.535,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{406, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.535,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.536,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.542,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 406 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.542,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 406) [ns_server:debug,2014-08-19T16:50:52.543,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.543,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 679 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:52.543,ns_1@10.242.238.88:<0.13797.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 679 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:52.561,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.562,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.562,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{679, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.581,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.581,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.588,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 679 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.589,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.589,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 935 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.590,ns_1@10.242.238.88:<0.13808.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 935 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.590,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 679) [ns_server:debug,2014-08-19T16:50:52.607,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.608,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{935, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.609,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.609,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.609,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.617,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 935 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.617,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 935) [ns_server:debug,2014-08-19T16:50:52.618,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.618,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 927 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.618,ns_1@10.242.238.88:<0.13819.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 927 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.623,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 215. Nacking mccouch update. [views:debug,2014-08-19T16:50:52.623,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/215. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",215,active,0} [ns_server:debug,2014-08-19T16:50:52.626,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715,587,404, 276,221,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973, 845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216, 1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760, 632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968, 840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345, 162,890,707,579,396,268,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395, 267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731, 603,420,292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939,811, 756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469, 341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830, 519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335, 880,569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385, 1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801, 
746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306, 251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978, 667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483, 172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588, 277,222,822,767,456,690,379,1001] [ns_server:debug,2014-08-19T16:50:52.636,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.637,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.637,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{927, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.638,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.638,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:52.647,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 927 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.648,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 927) [ns_server:debug,2014-08-19T16:50:52.649,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.649,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 925 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.650,ns_1@10.242.238.88:<0.13830.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 925 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.667,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.668,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:52.668,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.669,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.669,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{925, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:52.674,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 925 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.676,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.676,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 918 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.676,ns_1@10.242.238.88:<0.13841.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 918 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.679,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 925) [ns_server:debug,2014-08-19T16:50:52.693,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.694,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.694,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.695,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{918, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.695,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:50:52.699,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/215. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.699,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",215,active,0} [rebalance:info,2014-08-19T16:50:52.704,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 918 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.705,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 918) [ns_server:debug,2014-08-19T16:50:52.705,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.705,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 920 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.706,ns_1@10.242.238.88:<0.13851.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 920 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.732,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.733,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.733,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{920, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.733,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.733,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.739,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 920 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.739,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 920) [ns_server:debug,2014-08-19T16:50:52.740,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.740,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 681 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:52.740,ns_1@10.242.238.88:<0.13863.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 681 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:52.757,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.758,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{681, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.758,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.758,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.758,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.764,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 681 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.765,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 681) [ns_server:debug,2014-08-19T16:50:52.765,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.765,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 420 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.765,ns_1@10.242.238.88:<0.13873.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 420 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.785,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.786,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:52.786,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{420, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.787,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.787,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.794,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 420 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.795,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 420) [ns_server:debug,2014-08-19T16:50:52.796,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.796,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 673 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:52.796,ns_1@10.242.238.88:<0.13898.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 673 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:52.813,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.813,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.814,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.814,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.814,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{673, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:50:52.828,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 673 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.828,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 673) [ns_server:debug,2014-08-19T16:50:52.829,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.829,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 929 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.829,ns_1@10.242.238.88:<0.13909.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 929 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.847,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.848,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.848,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.848,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{929, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.848,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.854,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 929 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.855,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 929) [ns_server:debug,2014-08-19T16:50:52.856,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.856,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 921 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.856,ns_1@10.242.238.88:<0.13920.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 921 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.865,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 213. Nacking mccouch update. [views:debug,2014-08-19T16:50:52.866,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/213. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.866,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",213,active,0} [ns_server:debug,2014-08-19T16:50:52.868,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715,587,404, 276,221,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973, 845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216, 1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760, 632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968, 840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345, 162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553, 498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578, 395,267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 731,603,420,292,237,965,837,654,526,471,343,160,888,705,577,394,266,1016,939, 811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444, 316,989,861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524, 469,341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860,677, 549,494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574, 391,263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782, 727,599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232, 960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696, 385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256, 
801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617, 306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122, 978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538, 483,172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899, 588,277,222,822,767,456,690,379,1001] [ns_server:debug,2014-08-19T16:50:52.877,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.877,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.878,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.878,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{921, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.878,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:50:52.890,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 921 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.891,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 921) [ns_server:debug,2014-08-19T16:50:52.892,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.892,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 417 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:50:52.892,ns_1@10.242.238.88:<0.13931.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 417 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:50:52.911,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.912,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:52.912,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.912,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{417, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.912,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.919,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 417 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.920,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 417) [ns_server:debug,2014-08-19T16:50:52.921,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.921,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 677 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:52.921,ns_1@10.242.238.88:<0.13942.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 677 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:52.936,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.937,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.937,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.937,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{677, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.938,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.944,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 677 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.944,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 677) [ns_server:debug,2014-08-19T16:50:52.945,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.945,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 923 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.945,ns_1@10.242.238.88:<0.13953.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 923 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:50:52.950,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/213. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:52.950,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",213,active,0} [ns_server:debug,2014-08-19T16:50:52.954,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_916_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_916_'ns_1@10.242.238.89'">>}]}, {move_state,919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_919_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_919_'ns_1@10.242.238.89'">>}]}, {move_state,666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_666_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_666_'ns_1@10.242.238.89'">>}]}, {move_state,923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_923_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_923_'ns_1@10.242.238.89'">>}]}, {move_state,670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_670_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_670_'ns_1@10.242.238.89'">>}]}, {move_state,672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_672_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_672_'ns_1@10.242.238.89'">>}]}, {move_state,931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_931_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_931_'ns_1@10.242.238.89'">>}]}, {move_state,932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_932_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_932_'ns_1@10.242.238.89'">>}]}, {move_state,678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_678_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_678_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:50:52.965,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.965,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.966,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.966,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{923, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.966,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:52.973,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 923 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:52.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 923) [ns_server:debug,2014-08-19T16:50:52.975,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:52.975,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 916 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:52.975,ns_1@10.242.238.88:<0.13973.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 916 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:52.992,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.994,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{916, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:52.994,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:52.994,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:52.994,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.008,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 916 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:53.009,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 916) [ns_server:debug,2014-08-19T16:50:53.010,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:53.010,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 670 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:53.010,ns_1@10.242.238.88:<0.13984.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 670 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:53.027,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.027,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{670, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.028,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:53.028,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.028,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.035,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 670 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:53.035,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 670) [ns_server:debug,2014-08-19T16:50:53.036,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:53.036,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 932 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:53.036,ns_1@10.242.238.88:<0.14000.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 932 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:53.055,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.056,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.056,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{932, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.056,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:53.056,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.069,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 932 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:53.070,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 932) [ns_server:debug,2014-08-19T16:50:53.071,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:53.071,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 672 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:53.071,ns_1@10.242.238.88:<0.14020.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 672 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:53.090,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.090,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.091,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:53.091,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{672, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.091,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.097,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 672 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:53.098,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 672) [ns_server:debug,2014-08-19T16:50:53.098,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:53.098,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 666 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:53.098,ns_1@10.242.238.88:<0.14031.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 666 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:53.114,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.115,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:53.115,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.115,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{666, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.115,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.122,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 666 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:53.122,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 666) [ns_server:debug,2014-08-19T16:50:53.123,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:53.123,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 931 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:53.123,ns_1@10.242.238.88:<0.14041.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 931 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:53.133,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 211. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.133,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/211. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.133,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",211,active,0} [ns_server:debug,2014-08-19T16:50:53.135,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 924,796,741,613,430,302,247,975,847,664,536,481,353,170,898,770,715,587,404, 276,221,949,821,766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612, 429,301,246,118,974,846,663,535,480,352,897,769,714,586,403,275,220,948,820, 765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973, 845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325, 142,998,870,687,559,504,376,921,793,738,610,427,299,244,116,972,844,661,533, 478,350,895,712,584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,893,710,582,399,271,216, 1021,944,816,761,633,450,322,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760, 632,449,321,138,994,866,683,555,500,372,917,789,734,606,423,295,240,112,968, 840,657,529,474,346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,967,839,656,528,473,345, 162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553, 498,370,915,787,732,604,421,293,238,110,966,838,655,527,472,344,889,706,578, 395,267,212,1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786, 731,603,420,292,237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016, 
939,811,756,628,445,317,134,990,862,679,551,496,368,913,785,730,602,419,291, 236,108,964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627, 444,316,989,861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652, 524,469,341,158,886,703,575,392,264,1014,937,809,754,626,443,315,132,988,860, 677,549,494,366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702, 574,391,263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910, 782,727,599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287, 232,960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440, 312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462, 696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567, 256,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928, 617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433, 122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849, 538,483,172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354, 899,588,277,222,822,767,456,690,379,1001] [ns_server:debug,2014-08-19T16:50:53.146,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.148,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.148,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:53.148,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{931, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.149,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.157,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 931 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:53.158,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 931) [ns_server:debug,2014-08-19T16:50:53.159,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:53.159,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 919 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:50:53.159,ns_1@10.242.238.88:<0.14057.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 919 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:50:53.178,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.179,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{919, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.179,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.179,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:50:53.180,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.190,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 919 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:50:53.190,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 919) [ns_server:debug,2014-08-19T16:50:53.191,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:50:53.191,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 678 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:50:53.191,ns_1@10.242.238.88:<0.14068.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 678 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:50:53.209,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.210,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:50:53.210,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:50:53.210,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{678, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:50:53.210,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:50:53.217,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 678 done. Will delete it on: ['ns_1@10.242.238.88'] [views:debug,2014-08-19T16:50:53.217,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/211. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.217,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",211,active,0} [ns_server:debug,2014-08-19T16:50:53.217,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 678) [ns_server:debug,2014-08-19T16:50:53.219,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.88'}] [ns_server:debug,2014-08-19T16:50:53.219,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:50:53.220,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:50:53.220,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:info,2014-08-19T16:50:53.222,ns_1@10.242.238.88:<0.14079.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:50:53.223,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:50:53.223,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.88'} [ns_server:debug,2014-08-19T16:50:53.223,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:50:53.226,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.226,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14080.1>) [ns_server:debug,2014-08-19T16:50:53.226,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 915) [ns_server:debug,2014-08-19T16:50:53.226,ns_1@10.242.238.88:<0.14081.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:53.226,ns_1@10.242.238.88:<0.14080.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 915 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.227,ns_1@10.242.238.88:<0.14086.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 915 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.227,ns_1@10.242.238.88:<0.14087.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 915 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.230,ns_1@10.242.238.88:<0.14088.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 915 into 'ns_1@10.242.238.89' is <18124.29719.0> [ns_server:debug,2014-08-19T16:50:53.232,ns_1@10.242.238.88:<0.14088.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 915 into 'ns_1@10.242.238.91' is <18126.27294.0> [rebalance:debug,2014-08-19T16:50:53.232,ns_1@10.242.238.88:<0.14080.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 915 is <0.14088.1> [ns_server:debug,2014-08-19T16:50:53.259,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,250384}, tap_estimate, {replica_building,"default",915,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29719.0>, <<"replication_building_915_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.282,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,273138}, tap_estimate, {replica_building,"default",915,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27294.0>, <<"replication_building_915_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:53.282,ns_1@10.242.238.88:<0.14089.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27294.0>}, {'ns_1@10.242.238.89',<18124.29719.0>}]) [rebalance:info,2014-08-19T16:50:53.282,ns_1@10.242.238.88:<0.14080.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:53.283,ns_1@10.242.238.88:<0.14080.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 915 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.283,ns_1@10.242.238.88:<0.14080.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:53.284,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.287,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.287,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14101.1>) [ns_server:debug,2014-08-19T16:50:53.287,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 661) [ns_server:debug,2014-08-19T16:50:53.288,ns_1@10.242.238.88:<0.14102.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.288,ns_1@10.242.238.88:<0.14102.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:53.288,ns_1@10.242.238.88:<0.14101.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 661 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.288,ns_1@10.242.238.88:<0.14107.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 661 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.288,ns_1@10.242.238.88:<0.14108.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 661 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.293,ns_1@10.242.238.88:<0.14109.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 661 into 'ns_1@10.242.238.89' is <18124.29724.0> [ns_server:debug,2014-08-19T16:50:53.295,ns_1@10.242.238.88:<0.14109.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 661 into 'ns_1@10.242.238.90' is <18125.25394.0> [rebalance:debug,2014-08-19T16:50:53.295,ns_1@10.242.238.88:<0.14101.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 661 is <0.14109.1> [ns_server:debug,2014-08-19T16:50:53.323,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,314537}, tap_estimate, {replica_building,"default",661,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29724.0>, <<"replication_building_661_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.338,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,329929}, tap_estimate, {replica_building,"default",661,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25394.0>, <<"replication_building_661_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.339,ns_1@10.242.238.88:<0.14110.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25394.0>}, {'ns_1@10.242.238.89',<18124.29724.0>}]) [rebalance:info,2014-08-19T16:50:53.339,ns_1@10.242.238.88:<0.14101.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:53.340,ns_1@10.242.238.88:<0.14101.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 661 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.341,ns_1@10.242.238.88:<0.14101.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.341,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.344,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:53.344,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14136.1>) [ns_server:debug,2014-08-19T16:50:53.345,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 405) [ns_server:debug,2014-08-19T16:50:53.345,ns_1@10.242.238.88:<0.14137.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.345,ns_1@10.242.238.88:<0.14137.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:53.345,ns_1@10.242.238.88:<0.14136.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 405 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.345,ns_1@10.242.238.88:<0.14142.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 405 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.346,ns_1@10.242.238.88:<0.14143.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 405 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.348,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 209. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.349,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/209. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",209,active,0} [ns_server:debug,2014-08-19T16:50:53.349,ns_1@10.242.238.88:<0.14144.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 405 into 'ns_1@10.242.238.90' is <18125.25400.0> [ns_server:debug,2014-08-19T16:50:53.351,ns_1@10.242.238.88:<0.14144.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 405 into 'ns_1@10.242.238.89' is <18124.29743.0> [rebalance:debug,2014-08-19T16:50:53.351,ns_1@10.242.238.88:<0.14136.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 405 is <0.14144.1> [ns_server:debug,2014-08-19T16:50:53.351,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,847,664,536,481,353,170,898,770,715,587,404,276,221,949,821, 766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118, 974,846,663,535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326, 999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479, 351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712, 584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217, 1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425, 297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761, 633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841, 658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579, 396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 
416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154, 882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830,519, 464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880, 569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007, 930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801,746, 435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251, 851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978,667, 356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483,172, 772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588,277, 222,822,767,456,690,379,1001,924,613,302,247] [ns_server:debug,2014-08-19T16:50:53.378,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,369781}, tap_estimate, {replica_building,"default",405,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25400.0>, <<"replication_building_405_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:50:53.391,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/209. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.391,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",209,active,0} [ns_server:debug,2014-08-19T16:50:53.393,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,384182}, tap_estimate, {replica_building,"default",405,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29743.0>, <<"replication_building_405_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.393,ns_1@10.242.238.88:<0.14145.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29743.0>}, {'ns_1@10.242.238.90',<18125.25400.0>}]) [rebalance:info,2014-08-19T16:50:53.393,ns_1@10.242.238.88:<0.14136.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:53.394,ns_1@10.242.238.88:<0.14136.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 405 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.394,ns_1@10.242.238.88:<0.14136.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.395,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:53.398,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.398,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14157.1>) 
[ns_server:debug,2014-08-19T16:50:53.398,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 914) [ns_server:debug,2014-08-19T16:50:53.399,ns_1@10.242.238.88:<0.14158.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.399,ns_1@10.242.238.88:<0.14158.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:53.399,ns_1@10.242.238.88:<0.14157.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 914 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.399,ns_1@10.242.238.88:<0.14163.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 914 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.399,ns_1@10.242.238.88:<0.14164.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 914 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.403,ns_1@10.242.238.88:<0.14165.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 914 into 'ns_1@10.242.238.89' is <18124.29749.0> [ns_server:debug,2014-08-19T16:50:53.405,ns_1@10.242.238.88:<0.14165.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 914 into 'ns_1@10.242.238.91' is <18126.27315.0> [rebalance:debug,2014-08-19T16:50:53.405,ns_1@10.242.238.88:<0.14157.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 914 is <0.14165.1> [ns_server:debug,2014-08-19T16:50:53.431,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,422385}, tap_estimate, {replica_building,"default",914,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29749.0>, <<"replication_building_914_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.445,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,436441}, tap_estimate, {replica_building,"default",914,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27315.0>, <<"replication_building_914_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:53.445,ns_1@10.242.238.88:<0.14166.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27315.0>}, {'ns_1@10.242.238.89',<18124.29749.0>}]) [rebalance:info,2014-08-19T16:50:53.446,ns_1@10.242.238.88:<0.14157.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:53.446,ns_1@10.242.238.88:<0.14157.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 914 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.447,ns_1@10.242.238.88:<0.14157.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.447,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:50:53.450,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.451,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14193.1>) [ns_server:debug,2014-08-19T16:50:53.451,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 660) [ns_server:debug,2014-08-19T16:50:53.451,ns_1@10.242.238.88:<0.14194.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.451,ns_1@10.242.238.88:<0.14194.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:53.451,ns_1@10.242.238.88:<0.14193.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 660 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.451,ns_1@10.242.238.88:<0.14199.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 660 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.452,ns_1@10.242.238.88:<0.14200.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 660 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.455,ns_1@10.242.238.88:<0.14201.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 660 into 'ns_1@10.242.238.89' is <18124.29754.0> [ns_server:debug,2014-08-19T16:50:53.458,ns_1@10.242.238.88:<0.14201.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 660 into 'ns_1@10.242.238.90' is <18125.25433.0> [rebalance:debug,2014-08-19T16:50:53.458,ns_1@10.242.238.88:<0.14193.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 660 is <0.14201.1> [ns_server:debug,2014-08-19T16:50:53.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 207. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.474,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/207. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.475,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",207,active,0} [ns_server:debug,2014-08-19T16:50:53.476,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,847,664,536,481,353,170,898,770,715,587,404,276,221,949,821, 766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118, 974,846,663,535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326, 999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479, 351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712, 584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217, 1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425, 297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761, 633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841, 658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579, 396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,1010,805,750,439,128,984,673,362,907,596,285,230,830, 519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335, 880,569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385, 1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,801, 
746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306, 251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122,978, 667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483, 172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588, 277,222,822,767,456,690,379,1001,924,613,302,247] [ns_server:debug,2014-08-19T16:50:53.483,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,474856}, tap_estimate, {replica_building,"default",660,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29754.0>, <<"replication_building_660_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.497,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,488360}, tap_estimate, {replica_building,"default",660,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25433.0>, <<"replication_building_660_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.497,ns_1@10.242.238.88:<0.14202.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25433.0>}, {'ns_1@10.242.238.89',<18124.29754.0>}]) [rebalance:info,2014-08-19T16:50:53.498,ns_1@10.242.238.88:<0.14193.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:53.498,ns_1@10.242.238.88:<0.14193.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 660 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.498,ns_1@10.242.238.88:<0.14193.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.499,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.502,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:53.502,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14214.1>) [ns_server:debug,2014-08-19T16:50:53.503,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 404) [ns_server:debug,2014-08-19T16:50:53.503,ns_1@10.242.238.88:<0.14215.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.503,ns_1@10.242.238.88:<0.14215.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:53.503,ns_1@10.242.238.88:<0.14214.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 404 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.503,ns_1@10.242.238.88:<0.14220.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 404 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.503,ns_1@10.242.238.88:<0.14221.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 404 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.507,ns_1@10.242.238.88:<0.14222.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 404 into 'ns_1@10.242.238.90' is <18125.25439.0> [ns_server:debug,2014-08-19T16:50:53.509,ns_1@10.242.238.88:<0.14222.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 404 into 'ns_1@10.242.238.89' is <18124.29759.0> [rebalance:debug,2014-08-19T16:50:53.509,ns_1@10.242.238.88:<0.14214.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 404 is <0.14222.1> [views:debug,2014-08-19T16:50:53.510,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/207. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.510,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",207,active,0} [ns_server:debug,2014-08-19T16:50:53.535,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,526334}, tap_estimate, {replica_building,"default",404,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25439.0>, <<"replication_building_404_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.549,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,540965}, tap_estimate, {replica_building,"default",404,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29759.0>, <<"replication_building_404_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.550,ns_1@10.242.238.88:<0.14223.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29759.0>}, {'ns_1@10.242.238.90',<18125.25439.0>}]) [rebalance:info,2014-08-19T16:50:53.550,ns_1@10.242.238.88:<0.14214.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:53.551,ns_1@10.242.238.88:<0.14214.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 404 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.551,ns_1@10.242.238.88:<0.14214.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.552,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:53.555,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:53.555,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14243.1>) [ns_server:debug,2014-08-19T16:50:53.555,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 913) [ns_server:debug,2014-08-19T16:50:53.555,ns_1@10.242.238.88:<0.14244.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.556,ns_1@10.242.238.88:<0.14244.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:53.556,ns_1@10.242.238.88:<0.14243.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 913 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.556,ns_1@10.242.238.88:<0.14249.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 913 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.556,ns_1@10.242.238.88:<0.14250.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 913 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.560,ns_1@10.242.238.88:<0.14251.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 913 into 'ns_1@10.242.238.89' is <18124.29779.0> [ns_server:debug,2014-08-19T16:50:53.562,ns_1@10.242.238.88:<0.14251.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 913 into 'ns_1@10.242.238.91' is <18126.27335.0> [rebalance:debug,2014-08-19T16:50:53.563,ns_1@10.242.238.88:<0.14243.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 913 is <0.14251.1> [ns_server:debug,2014-08-19T16:50:53.588,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,579821}, tap_estimate, {replica_building,"default",913,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29779.0>, <<"replication_building_913_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.602,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,593452}, tap_estimate, {replica_building,"default",913,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27335.0>, <<"replication_building_913_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:53.602,ns_1@10.242.238.88:<0.14258.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27335.0>}, {'ns_1@10.242.238.89',<18124.29779.0>}]) [rebalance:info,2014-08-19T16:50:53.603,ns_1@10.242.238.88:<0.14243.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:53.603,ns_1@10.242.238.88:<0.14243.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 913 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.603,ns_1@10.242.238.88:<0.14243.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:53.604,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.607,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.607,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14270.1>) [ns_server:debug,2014-08-19T16:50:53.607,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 659) [ns_server:debug,2014-08-19T16:50:53.608,ns_1@10.242.238.88:<0.14271.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.608,ns_1@10.242.238.88:<0.14271.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:53.608,ns_1@10.242.238.88:<0.14270.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 659 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [ns_server:debug,2014-08-19T16:50:53.608,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 205. Nacking mccouch update. [rebalance:info,2014-08-19T16:50:53.608,ns_1@10.242.238.88:<0.14276.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 659 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.609,ns_1@10.242.238.88:<0.14277.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 659 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:50:53.609,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/205. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",205,active,0} [ns_server:debug,2014-08-19T16:50:53.611,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,847,664,536,481,353,170,898,770,715,587,404,276,221,949,821, 766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118, 974,846,663,535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326, 999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479, 351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712, 584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217, 1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425, 297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761, 633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841, 658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579, 396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696, 385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256, 
801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617, 306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433,122, 978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538, 483,172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899, 588,277,222,822,767,456,690,379,1001,924,613,302,247] [ns_server:debug,2014-08-19T16:50:53.612,ns_1@10.242.238.88:<0.14278.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 659 into 'ns_1@10.242.238.89' is <18124.29784.0> [ns_server:debug,2014-08-19T16:50:53.615,ns_1@10.242.238.88:<0.14278.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 659 into 'ns_1@10.242.238.90' is <18125.25446.0> [rebalance:debug,2014-08-19T16:50:53.615,ns_1@10.242.238.88:<0.14270.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 659 is <0.14278.1> [ns_server:debug,2014-08-19T16:50:53.642,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,633132}, tap_estimate, {replica_building,"default",659,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29784.0>, <<"replication_building_659_'ns_1@10.242.238.89'">>} [views:debug,2014-08-19T16:50:53.643,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/205. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.643,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",205,active,0} [ns_server:debug,2014-08-19T16:50:53.652,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,643882}, tap_estimate, {replica_building,"default",659,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25446.0>, <<"replication_building_659_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.653,ns_1@10.242.238.88:<0.14279.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25446.0>}, {'ns_1@10.242.238.89',<18124.29784.0>}]) [rebalance:info,2014-08-19T16:50:53.653,ns_1@10.242.238.88:<0.14270.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:53.654,ns_1@10.242.238.88:<0.14270.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 659 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.655,ns_1@10.242.238.88:<0.14270.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.655,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.659,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:53.659,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14291.1>) 
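capi_set_view_manager logs the complete "Usable vbuckets" list every time it changes, so consecutive snapshots in this stretch differ only by the vbucket that has just been signalled active (205 above, then 203, 201 and 199 further on). A small sketch, reusing entries isolated as above and treating the snapshot bodies as plain text (function names and the abbreviated sample lists are illustrative; real snapshots carry several hundred vbuckets), for diffing consecutive snapshots:

    import re

    def parse_vbucket_list(body):
        """Extract the integers from a 'Usable vbuckets: [...]' entry body."""
        bracketed = body.split("Usable vbuckets:", 1)[1]
        return {int(n) for n in re.findall(r"\d+", bracketed)}

    def diff_snapshots(snapshots):
        """Yield (added, removed) sets between consecutive usable-vbucket snapshots."""
        previous = None
        for snap in snapshots:
            current = parse_vbucket_list(snap)
            if previous is not None:
                yield current - previous, previous - current
            previous = current

    if __name__ == "__main__":
        # Two abbreviated, hypothetical snapshots standing in for the full lists above.
        a = "Usable vbuckets: [933,622,311,205]"
        b = "Usable vbuckets: [933,622,311,205,203]"
        for added, removed in diff_snapshots([a, b]):
            print("became usable:", sorted(added), "no longer usable:", sorted(removed))
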
[ns_server:debug,2014-08-19T16:50:53.659,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 403) [ns_server:debug,2014-08-19T16:50:53.660,ns_1@10.242.238.88:<0.14292.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.660,ns_1@10.242.238.88:<0.14292.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:53.660,ns_1@10.242.238.88:<0.14291.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 403 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.660,ns_1@10.242.238.88:<0.14297.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 403 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.660,ns_1@10.242.238.88:<0.14298.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 403 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.665,ns_1@10.242.238.88:<0.14299.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 403 into 'ns_1@10.242.238.90' is <18125.25466.0> [ns_server:debug,2014-08-19T16:50:53.667,ns_1@10.242.238.88:<0.14299.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 403 into 'ns_1@10.242.238.89' is <18124.29789.0> [rebalance:debug,2014-08-19T16:50:53.667,ns_1@10.242.238.88:<0.14291.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 403 is <0.14299.1> [ns_server:debug,2014-08-19T16:50:53.696,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,687783}, tap_estimate, {replica_building,"default",403,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25466.0>, <<"replication_building_403_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.706,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,697391}, tap_estimate, {replica_building,"default",403,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29789.0>, <<"replication_building_403_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.706,ns_1@10.242.238.88:<0.14300.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29789.0>}, {'ns_1@10.242.238.90',<18125.25466.0>}]) [rebalance:info,2014-08-19T16:50:53.707,ns_1@10.242.238.88:<0.14291.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:53.707,ns_1@10.242.238.88:<0.14291.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 403 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.708,ns_1@10.242.238.88:<0.14291.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.708,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} 
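For every move, ns_replicas_builder_utils reports one ebucketmigrator per future replica; vbucket 403 above, for example, is built into both 'ns_1@10.242.238.90' and 'ns_1@10.242.238.89'. A sketch, with an assumed regex over that message text and illustrative helper names, that collects those lines into a vbucket-to-destinations map:

    import re
    from collections import defaultdict

    BUILDER = re.compile(
        r"Replica building ebucketmigrator for vbucket (\d+) into '([^']+)'"
    )

    def replica_destinations(bodies):
        """Map vbucket id -> nodes a replica is being built on, from entry bodies."""
        dests = defaultdict(list)
        for body in bodies:
            m = BUILDER.search(body)
            if m:
                dests[int(m.group(1))].append(m.group(2))
        return dict(dests)

    if __name__ == "__main__":
        sample = [
            "Replica building ebucketmigrator for vbucket 403 into 'ns_1@10.242.238.90' is <18125.25466.0>",
            "Replica building ebucketmigrator for vbucket 403 into 'ns_1@10.242.238.89' is <18124.29789.0>",
        ]
        print(replica_destinations(sample))
        # {403: ['ns_1@10.242.238.90', 'ns_1@10.242.238.89']}
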
[ns_server:debug,2014-08-19T16:50:53.711,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.711,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14312.1>) [ns_server:debug,2014-08-19T16:50:53.712,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 912) [ns_server:debug,2014-08-19T16:50:53.712,ns_1@10.242.238.88:<0.14313.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.712,ns_1@10.242.238.88:<0.14313.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:53.712,ns_1@10.242.238.88:<0.14312.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 912 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.712,ns_1@10.242.238.88:<0.14318.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 912 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.712,ns_1@10.242.238.88:<0.14319.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 912 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.716,ns_1@10.242.238.88:<0.14325.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 912 into 'ns_1@10.242.238.89' is <18124.29795.0> [ns_server:debug,2014-08-19T16:50:53.719,ns_1@10.242.238.88:<0.14325.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 912 into 'ns_1@10.242.238.91' is <18126.27342.0> [rebalance:debug,2014-08-19T16:50:53.719,ns_1@10.242.238.88:<0.14312.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 912 is <0.14325.1> [ns_server:debug,2014-08-19T16:50:53.744,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,735505}, tap_estimate, {replica_building,"default",912,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29795.0>, <<"replication_building_912_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.752,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 203. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.752,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/203. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",203,active,0} [ns_server:debug,2014-08-19T16:50:53.755,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,847,664,536,481,353,170,898,770,715,587,404,276,221,949,821, 766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118, 974,846,663,535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326, 999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479, 351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712, 584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217, 1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425, 297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761, 633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841, 658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579, 396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462, 696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567, 
256,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928, 617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744,433, 122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849, 538,483,172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354, 899,588,277,222,822,767,456,690,379,1001,924,613,302,247] [ns_server:debug,2014-08-19T16:50:53.756,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,747877}, tap_estimate, {replica_building,"default",912,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27342.0>, <<"replication_building_912_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:53.757,ns_1@10.242.238.88:<0.14329.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27342.0>}, {'ns_1@10.242.238.89',<18124.29795.0>}]) [rebalance:info,2014-08-19T16:50:53.757,ns_1@10.242.238.88:<0.14312.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:53.758,ns_1@10.242.238.88:<0.14312.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 912 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.758,ns_1@10.242.238.88:<0.14312.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.759,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.762,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.762,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14347.1>) [ns_server:debug,2014-08-19T16:50:53.762,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 658) [ns_server:debug,2014-08-19T16:50:53.763,ns_1@10.242.238.88:<0.14348.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.763,ns_1@10.242.238.88:<0.14348.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:53.763,ns_1@10.242.238.88:<0.14347.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 658 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.763,ns_1@10.242.238.88:<0.14353.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 658 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.763,ns_1@10.242.238.88:<0.14354.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 658 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.767,ns_1@10.242.238.88:<0.14355.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 658 into 'ns_1@10.242.238.89' is <18124.29800.0> [ns_server:debug,2014-08-19T16:50:53.770,ns_1@10.242.238.88:<0.14355.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 658 into 'ns_1@10.242.238.90' is <18125.25485.0> [rebalance:debug,2014-08-19T16:50:53.770,ns_1@10.242.238.88:<0.14347.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 658 is <0.14355.1> [views:debug,2014-08-19T16:50:53.794,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/203. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.796,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",203,active,0} [ns_server:debug,2014-08-19T16:50:53.796,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,787359}, tap_estimate, {replica_building,"default",658,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29800.0>, <<"replication_building_658_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.807,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,798966}, tap_estimate, {replica_building,"default",658,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25485.0>, <<"replication_building_658_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.808,ns_1@10.242.238.88:<0.14356.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25485.0>}, {'ns_1@10.242.238.89',<18124.29800.0>}]) [rebalance:info,2014-08-19T16:50:53.808,ns_1@10.242.238.88:<0.14347.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:53.809,ns_1@10.242.238.88:<0.14347.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 658 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.810,ns_1@10.242.238.88:<0.14347.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.810,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.813,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:53.814,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14368.1>) [ns_server:debug,2014-08-19T16:50:53.814,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 402) [ns_server:debug,2014-08-19T16:50:53.814,ns_1@10.242.238.88:<0.14369.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.814,ns_1@10.242.238.88:<0.14369.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:53.814,ns_1@10.242.238.88:<0.14368.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 402 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.814,ns_1@10.242.238.88:<0.14374.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 402 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.814,ns_1@10.242.238.88:<0.14375.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 402 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.819,ns_1@10.242.238.88:<0.14376.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 402 into 'ns_1@10.242.238.90' is <18125.25491.0> [ns_server:debug,2014-08-19T16:50:53.822,ns_1@10.242.238.88:<0.14376.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 402 into 'ns_1@10.242.238.89' is <18124.29819.0> [rebalance:debug,2014-08-19T16:50:53.822,ns_1@10.242.238.88:<0.14368.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 402 is <0.14376.1> [ns_server:debug,2014-08-19T16:50:53.848,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,839061}, tap_estimate, {replica_building,"default",402,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25491.0>, <<"replication_building_402_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.860,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,851597}, tap_estimate, {replica_building,"default",402,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29819.0>, <<"replication_building_402_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.861,ns_1@10.242.238.88:<0.14377.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29819.0>}, {'ns_1@10.242.238.90',<18125.25491.0>}]) [rebalance:info,2014-08-19T16:50:53.861,ns_1@10.242.238.88:<0.14368.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:53.861,ns_1@10.242.238.88:<0.14368.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 402 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.862,ns_1@10.242.238.88:<0.14368.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
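Each tap_estimate record above carries an Erlang now()-style tuple {MegaSecs,Secs,MicroSecs}, the bucket and vbucket being rebuilt, the source and destination nodes, the current estimate (0 for these freshly started builds) and the TAP stream name. A sketch, with an assumed regex matching the body shape shown above, that pulls those fields out and converts the tuple to a UTC timestamp:

    import re
    from datetime import datetime, timezone

    TAP = re.compile(
        r"\{\{(\d+),(\d+),(\d+)\},\s*tap_estimate,\s*"
        r'\{replica_building,"(?P<bucket>[^"]+)",(?P<vbucket>\d+),'
        r"'(?P<src>[^']+)',\s*'(?P<dst>[^']+)'\},\s*(?P<estimate>\d+)"
    )

    def parse_tap_estimate(body):
        """Return the fields of a 'Seeing tap_estimate' entry body, or None."""
        m = TAP.search(body)
        if not m:
            return None
        mega, secs, micro = (int(m.group(i)) for i in (1, 2, 3))
        when = datetime.fromtimestamp(mega * 1_000_000 + secs + micro / 1e6, tz=timezone.utc)
        return {
            "when": when,
            "bucket": m.group("bucket"),
            "vbucket": int(m.group("vbucket")),
            "src": m.group("src"),
            "dst": m.group("dst"),
            "estimate": int(m.group("estimate")),
        }

    if __name__ == "__main__":
        sample = """Seeing tap_estimate: {{1408,452653,474856}, tap_estimate, {replica_building,"default",660,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29754.0>, <<"replication_building_660_'ns_1@10.242.238.89'">>}"""
        print(parse_tap_estimate(sample))
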
[ns_server:debug,2014-08-19T16:50:53.863,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:53.866,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.866,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14395.1>) [ns_server:debug,2014-08-19T16:50:53.866,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 911) [ns_server:debug,2014-08-19T16:50:53.866,ns_1@10.242.238.88:<0.14396.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.866,ns_1@10.242.238.88:<0.14396.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:53.867,ns_1@10.242.238.88:<0.14395.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 911 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.867,ns_1@10.242.238.88:<0.14403.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 911 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.867,ns_1@10.242.238.88:<0.14404.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 911 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.870,ns_1@10.242.238.88:<0.14405.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 911 into 'ns_1@10.242.238.89' is <18124.29831.0> [ns_server:debug,2014-08-19T16:50:53.873,ns_1@10.242.238.88:<0.14405.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 911 into 'ns_1@10.242.238.91' is <18126.27362.0> [rebalance:debug,2014-08-19T16:50:53.873,ns_1@10.242.238.88:<0.14395.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 911 is <0.14405.1> [ns_server:debug,2014-08-19T16:50:53.899,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,890171}, tap_estimate, {replica_building,"default",911,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29831.0>, <<"replication_building_911_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.913,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,904142}, tap_estimate, {replica_building,"default",911,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27362.0>, <<"replication_building_911_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:53.913,ns_1@10.242.238.88:<0.14412.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27362.0>}, {'ns_1@10.242.238.89',<18124.29831.0>}]) [rebalance:info,2014-08-19T16:50:53.913,ns_1@10.242.238.88:<0.14395.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:53.914,ns_1@10.242.238.88:<0.14395.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 911 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.914,ns_1@10.242.238.88:<0.14395.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.915,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.918,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:53.918,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14424.1>) [ns_server:debug,2014-08-19T16:50:53.918,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 657) [ns_server:debug,2014-08-19T16:50:53.919,ns_1@10.242.238.88:<0.14425.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.919,ns_1@10.242.238.88:<0.14425.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:53.919,ns_1@10.242.238.88:<0.14424.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 657 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.919,ns_1@10.242.238.88:<0.14430.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 657 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.920,ns_1@10.242.238.88:<0.14431.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 657 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.920,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 201. Nacking mccouch update. [views:debug,2014-08-19T16:50:53.920,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/201. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.920,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",201,active,0} [ns_server:debug,2014-08-19T16:50:53.922,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,847,664,536,481,353,170,898,770,715,587,404,276,221,949,821, 766,638,455,327,144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118, 974,846,663,535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326, 999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479, 351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687, 559,504,376,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712, 584,401,273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217, 1022,945,817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425, 297,242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761, 633,450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841, 658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579, 396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462, 696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567, 
256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005, 928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,799,744, 433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249, 849,538,483,172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665, 354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247] [ns_server:debug,2014-08-19T16:50:53.924,ns_1@10.242.238.88:<0.14432.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 657 into 'ns_1@10.242.238.89' is <18124.29836.0> [ns_server:debug,2014-08-19T16:50:53.926,ns_1@10.242.238.88:<0.14432.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 657 into 'ns_1@10.242.238.90' is <18125.25510.0> [rebalance:debug,2014-08-19T16:50:53.926,ns_1@10.242.238.88:<0.14424.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 657 is <0.14432.1> [ns_server:debug,2014-08-19T16:50:53.953,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,944815}, tap_estimate, {replica_building,"default",657,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29836.0>, <<"replication_building_657_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:53.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,958301}, tap_estimate, {replica_building,"default",657,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25510.0>, <<"replication_building_657_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:53.967,ns_1@10.242.238.88:<0.14433.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25510.0>}, {'ns_1@10.242.238.89',<18124.29836.0>}]) [rebalance:info,2014-08-19T16:50:53.967,ns_1@10.242.238.88:<0.14424.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:53.968,ns_1@10.242.238.88:<0.14424.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 657 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:53.969,ns_1@10.242.238.88:<0.14424.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:53.969,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:53.972,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:53.973,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14445.1>) [ns_server:debug,2014-08-19T16:50:53.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 401) [ns_server:debug,2014-08-19T16:50:53.973,ns_1@10.242.238.88:<0.14446.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:53.973,ns_1@10.242.238.88:<0.14446.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:53.973,ns_1@10.242.238.88:<0.14445.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 401 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:53.974,ns_1@10.242.238.88:<0.14451.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 401 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:53.974,ns_1@10.242.238.88:<0.14452.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 401 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:53.977,ns_1@10.242.238.88:<0.14453.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 401 into 'ns_1@10.242.238.90' is <18125.25516.0> [ns_server:debug,2014-08-19T16:50:53.980,ns_1@10.242.238.88:<0.14453.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 401 into 'ns_1@10.242.238.89' is <18124.29855.0> [rebalance:debug,2014-08-19T16:50:53.980,ns_1@10.242.238.88:<0.14445.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 401 is <0.14453.1> [views:debug,2014-08-19T16:50:53.985,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/201. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:53.986,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",201,active,0} [ns_server:debug,2014-08-19T16:50:54.005,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452653,996813}, tap_estimate, {replica_building,"default",401,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25516.0>, <<"replication_building_401_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.018,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,9569}, tap_estimate, {replica_building,"default",401,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29855.0>, <<"replication_building_401_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.019,ns_1@10.242.238.88:<0.14454.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29855.0>}, {'ns_1@10.242.238.90',<18125.25516.0>}]) [rebalance:info,2014-08-19T16:50:54.019,ns_1@10.242.238.88:<0.14445.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:54.019,ns_1@10.242.238.88:<0.14445.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 401 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.020,ns_1@10.242.238.88:<0.14445.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.020,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} 
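The same cycle repeats per vbucket in this stretch: ns_rebalance_observer notes the move start, the two replica builders report their estimates, and roughly 50 ms later ns_vbucket_mover notes backfill done and schedules the next move (for vbucket 401: start at 16:50:53.973, backfill done at 16:50:54.020). A sketch, again over pre-split (timestamp, body) pairs with illustrative names, that pairs those two markers and reports backfill latency per vbucket:

    import re
    from datetime import datetime

    START = re.compile(r"Noted vbucket move start \(vbucket (\d+)\)")
    DONE = re.compile(r"noted backfill done: \{move,\{(\d+),")

    def backfill_latencies(entries):
        """entries: iterable of (iso_timestamp, body) pairs in log order.
        Returns {vbucket: seconds from 'move start' to 'backfill done'}."""
        started, latency = {}, {}
        for ts, body in entries:
            when = datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%f")
            m = START.search(body)
            if m:
                started[int(m.group(1))] = when
                continue
            m = DONE.search(body)
            if m and int(m.group(1)) in started:
                vb = int(m.group(1))
                latency[vb] = round((when - started.pop(vb)).total_seconds(), 3)
        return latency

    if __name__ == "__main__":
        # Timestamps taken from the vbucket 401 entries above; bodies abbreviated.
        sample = [
            ("2014-08-19T16:50:53.973", "Noted vbucket move start (vbucket 401)"),
            ("2014-08-19T16:50:54.020", "noted backfill done: {move,{401, ...}}"),
        ]
        print(backfill_latencies(sample))   # {401: 0.047}
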
[ns_server:debug,2014-08-19T16:50:54.023,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.024,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14466.1>) [ns_server:debug,2014-08-19T16:50:54.024,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 910) [ns_server:debug,2014-08-19T16:50:54.024,ns_1@10.242.238.88:<0.14467.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.024,ns_1@10.242.238.88:<0.14467.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:54.024,ns_1@10.242.238.88:<0.14466.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 910 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.025,ns_1@10.242.238.88:<0.14472.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 910 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.025,ns_1@10.242.238.88:<0.14473.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 910 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.028,ns_1@10.242.238.88:<0.14474.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 910 into 'ns_1@10.242.238.89' is <18124.29861.0> [ns_server:debug,2014-08-19T16:50:54.031,ns_1@10.242.238.88:<0.14474.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 910 into 'ns_1@10.242.238.91' is <18126.27368.0> [rebalance:debug,2014-08-19T16:50:54.031,ns_1@10.242.238.88:<0.14466.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 910 is <0.14474.1> [ns_server:debug,2014-08-19T16:50:54.057,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,48877}, tap_estimate, {replica_building,"default",910,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29861.0>, <<"replication_building_910_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.068,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,59962}, tap_estimate, {replica_building,"default",910,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27368.0>, <<"replication_building_910_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:54.069,ns_1@10.242.238.88:<0.14475.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27368.0>}, {'ns_1@10.242.238.89',<18124.29861.0>}]) [rebalance:info,2014-08-19T16:50:54.069,ns_1@10.242.238.88:<0.14466.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:54.070,ns_1@10.242.238.88:<0.14466.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 910 on 
ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.070,ns_1@10.242.238.88:<0.14466.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.071,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.076,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.076,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14495.1>) [ns_server:debug,2014-08-19T16:50:54.076,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 656) [ns_server:debug,2014-08-19T16:50:54.077,ns_1@10.242.238.88:<0.14496.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.077,ns_1@10.242.238.88:<0.14496.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:54.077,ns_1@10.242.238.88:<0.14495.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 656 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.077,ns_1@10.242.238.88:<0.14501.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 656 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.077,ns_1@10.242.238.88:<0.14502.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 656 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.081,ns_1@10.242.238.88:<0.14509.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 656 into 'ns_1@10.242.238.89' is <18124.29867.0> [ns_server:debug,2014-08-19T16:50:54.084,ns_1@10.242.238.88:<0.14509.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 656 into 'ns_1@10.242.238.90' is <18125.25535.0> [rebalance:debug,2014-08-19T16:50:54.084,ns_1@10.242.238.88:<0.14495.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 656 is <0.14509.1> [ns_server:debug,2014-08-19T16:50:54.113,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,100903}, tap_estimate, {replica_building,"default",656,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29867.0>, <<"replication_building_656_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.122,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,113821}, tap_estimate, {replica_building,"default",656,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25535.0>, <<"replication_building_656_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.123,ns_1@10.242.238.88:<0.14510.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25535.0>}, 
{'ns_1@10.242.238.89',<18124.29867.0>}]) [rebalance:info,2014-08-19T16:50:54.123,ns_1@10.242.238.88:<0.14495.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:54.124,ns_1@10.242.238.88:<0.14495.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 656 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.124,ns_1@10.242.238.88:<0.14495.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.125,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.128,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:54.128,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14522.1>) [ns_server:debug,2014-08-19T16:50:54.128,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 400) [ns_server:debug,2014-08-19T16:50:54.129,ns_1@10.242.238.88:<0.14523.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.129,ns_1@10.242.238.88:<0.14523.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:54.129,ns_1@10.242.238.88:<0.14522.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 400 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.129,ns_1@10.242.238.88:<0.14528.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 400 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.129,ns_1@10.242.238.88:<0.14529.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 400 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.133,ns_1@10.242.238.88:<0.14530.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 400 into 'ns_1@10.242.238.90' is <18125.25555.0> [ns_server:debug,2014-08-19T16:50:54.135,ns_1@10.242.238.88:<0.14530.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 400 into 'ns_1@10.242.238.89' is <18124.29886.0> [rebalance:debug,2014-08-19T16:50:54.135,ns_1@10.242.238.88:<0.14522.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 400 is <0.14530.1> [ns_server:debug,2014-08-19T16:50:54.160,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 199. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.161,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/199. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.161,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",199,active,0} [ns_server:debug,2014-08-19T16:50:54.162,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,153238}, tap_estimate, {replica_building,"default",400,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25555.0>, <<"replication_building_400_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.163,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,770,715,587,404,276,221,949,821,766,638,455,327, 144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535, 480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560, 505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768, 713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218, 1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580, 397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788, 733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018, 941,813,758,630,447,319,136,992,864,681,553,498,370,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812,757,629, 446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654, 526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990, 862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783,728, 600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,233,961, 833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882,699,571, 
388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698, 387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258, 203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930, 619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,201,801,746, 435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251, 851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978, 667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483, 172,772,717,406,951,640,329,874,563,508,797,742,431,120,976,665,354,899,588, 277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481,170] [ns_server:debug,2014-08-19T16:50:54.176,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,167099}, tap_estimate, {replica_building,"default",400,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29886.0>, <<"replication_building_400_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.176,ns_1@10.242.238.88:<0.14531.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29886.0>}, {'ns_1@10.242.238.90',<18125.25555.0>}]) [rebalance:info,2014-08-19T16:50:54.176,ns_1@10.242.238.88:<0.14522.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:54.177,ns_1@10.242.238.88:<0.14522.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 400 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.177,ns_1@10.242.238.88:<0.14522.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.178,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:54.181,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.181,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14543.1>) [ns_server:debug,2014-08-19T16:50:54.181,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 909) [ns_server:debug,2014-08-19T16:50:54.181,ns_1@10.242.238.88:<0.14544.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.182,ns_1@10.242.238.88:<0.14544.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:54.182,ns_1@10.242.238.88:<0.14543.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 909 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.182,ns_1@10.242.238.88:<0.14549.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 909 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.182,ns_1@10.242.238.88:<0.14550.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 909 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.185,ns_1@10.242.238.88:<0.14551.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 909 into 'ns_1@10.242.238.89' is <18124.29892.0> [ns_server:debug,2014-08-19T16:50:54.188,ns_1@10.242.238.88:<0.14551.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 909 into 'ns_1@10.242.238.91' is <18126.27388.0> [rebalance:debug,2014-08-19T16:50:54.188,ns_1@10.242.238.88:<0.14543.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 909 is <0.14551.1> [ns_server:debug,2014-08-19T16:50:54.214,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,205629}, tap_estimate, {replica_building,"default",909,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29892.0>, <<"replication_building_909_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.227,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,218510}, tap_estimate, {replica_building,"default",909,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27388.0>, <<"replication_building_909_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:54.228,ns_1@10.242.238.88:<0.14552.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27388.0>}, {'ns_1@10.242.238.89',<18124.29892.0>}]) [rebalance:info,2014-08-19T16:50:54.228,ns_1@10.242.238.88:<0.14543.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:54.228,ns_1@10.242.238.88:<0.14543.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 909 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.229,ns_1@10.242.238.88:<0.14543.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.230,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.233,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.233,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14564.1>) 
[ns_server:debug,2014-08-19T16:50:54.233,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 655) [ns_server:debug,2014-08-19T16:50:54.234,ns_1@10.242.238.88:<0.14565.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.234,ns_1@10.242.238.88:<0.14565.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:54.234,ns_1@10.242.238.88:<0.14564.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 655 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.234,ns_1@10.242.238.88:<0.14570.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 655 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.234,ns_1@10.242.238.88:<0.14571.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 655 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.239,ns_1@10.242.238.88:<0.14572.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 655 into 'ns_1@10.242.238.89' is <18124.29897.0> [ns_server:debug,2014-08-19T16:50:54.242,ns_1@10.242.238.88:<0.14572.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 655 into 'ns_1@10.242.238.90' is <18125.25560.0> [rebalance:debug,2014-08-19T16:50:54.242,ns_1@10.242.238.88:<0.14564.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 655 is <0.14572.1> [views:debug,2014-08-19T16:50:54.244,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/199. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",199,active,0} [ns_server:debug,2014-08-19T16:50:54.268,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,259765}, tap_estimate, {replica_building,"default",655,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29897.0>, <<"replication_building_655_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.285,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,276399}, tap_estimate, {replica_building,"default",655,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25560.0>, <<"replication_building_655_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.286,ns_1@10.242.238.88:<0.14573.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25560.0>}, {'ns_1@10.242.238.89',<18124.29897.0>}]) [rebalance:info,2014-08-19T16:50:54.286,ns_1@10.242.238.88:<0.14564.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:54.286,ns_1@10.242.238.88:<0.14564.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 655 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.287,ns_1@10.242.238.88:<0.14564.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.287,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.290,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:54.291,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14585.1>) [ns_server:debug,2014-08-19T16:50:54.291,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 399) [ns_server:debug,2014-08-19T16:50:54.291,ns_1@10.242.238.88:<0.14586.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.291,ns_1@10.242.238.88:<0.14586.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:54.291,ns_1@10.242.238.88:<0.14585.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 399 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.291,ns_1@10.242.238.88:<0.14591.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 399 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.292,ns_1@10.242.238.88:<0.14592.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 399 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.295,ns_1@10.242.238.88:<0.14593.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 399 into 'ns_1@10.242.238.90' is <18125.25580.0> [ns_server:debug,2014-08-19T16:50:54.298,ns_1@10.242.238.88:<0.14593.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 399 into 'ns_1@10.242.238.89' is <18124.29916.0> [rebalance:debug,2014-08-19T16:50:54.298,ns_1@10.242.238.88:<0.14585.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 399 is <0.14593.1> [ns_server:debug,2014-08-19T16:50:54.325,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,316013}, tap_estimate, {replica_building,"default",399,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25580.0>, <<"replication_building_399_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.336,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,327027}, tap_estimate, {replica_building,"default",399,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29916.0>, <<"replication_building_399_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.336,ns_1@10.242.238.88:<0.14594.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29916.0>}, {'ns_1@10.242.238.90',<18125.25580.0>}]) [rebalance:info,2014-08-19T16:50:54.336,ns_1@10.242.238.88:<0.14585.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:54.337,ns_1@10.242.238.88:<0.14585.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 399 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.338,ns_1@10.242.238.88:<0.14585.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.338,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:54.341,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.341,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14620.1>) 
[ns_server:debug,2014-08-19T16:50:54.341,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 908) [ns_server:debug,2014-08-19T16:50:54.342,ns_1@10.242.238.88:<0.14621.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.342,ns_1@10.242.238.88:<0.14621.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:54.342,ns_1@10.242.238.88:<0.14620.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 908 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.342,ns_1@10.242.238.88:<0.14626.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 908 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.342,ns_1@10.242.238.88:<0.14627.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 908 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.346,ns_1@10.242.238.88:<0.14628.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 908 into 'ns_1@10.242.238.89' is <18124.29936.0> [ns_server:debug,2014-08-19T16:50:54.349,ns_1@10.242.238.88:<0.14628.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 908 into 'ns_1@10.242.238.91' is <18126.27408.0> [rebalance:debug,2014-08-19T16:50:54.349,ns_1@10.242.238.88:<0.14620.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 908 is <0.14628.1> [ns_server:debug,2014-08-19T16:50:54.376,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,367261}, tap_estimate, {replica_building,"default",908,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29936.0>, <<"replication_building_908_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.387,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,378928}, tap_estimate, {replica_building,"default",908,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27408.0>, <<"replication_building_908_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:54.388,ns_1@10.242.238.88:<0.14629.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27408.0>}, {'ns_1@10.242.238.89',<18124.29936.0>}]) [rebalance:info,2014-08-19T16:50:54.388,ns_1@10.242.238.88:<0.14620.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:54.389,ns_1@10.242.238.88:<0.14620.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 908 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.389,ns_1@10.242.238.88:<0.14620.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.390,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:50:54.393,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.393,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14641.1>) [ns_server:debug,2014-08-19T16:50:54.393,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 654) [ns_server:debug,2014-08-19T16:50:54.393,ns_1@10.242.238.88:<0.14642.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.394,ns_1@10.242.238.88:<0.14642.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:54.394,ns_1@10.242.238.88:<0.14641.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 654 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.394,ns_1@10.242.238.88:<0.14647.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 654 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.394,ns_1@10.242.238.88:<0.14648.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 654 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.398,ns_1@10.242.238.88:<0.14649.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 654 into 'ns_1@10.242.238.89' is <18124.29941.0> [ns_server:debug,2014-08-19T16:50:54.401,ns_1@10.242.238.88:<0.14649.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 654 into 'ns_1@10.242.238.90' is <18125.25599.0> [rebalance:debug,2014-08-19T16:50:54.401,ns_1@10.242.238.88:<0.14641.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 654 is <0.14649.1> [ns_server:debug,2014-08-19T16:50:54.419,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 197. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.420,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/197. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",197,active,0} [ns_server:debug,2014-08-19T16:50:54.422,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,770,715,587,404,276,221,949,821,766,638,455,327, 144,872,689,561,506,378,1000,923,795,740,612,429,301,246,118,974,846,663,535, 480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560, 505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768, 713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218, 1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580, 397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788, 733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018, 941,813,758,630,447,319,136,992,864,681,553,498,370,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812,757,629, 446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654, 526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990, 862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783,728, 600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936,808, 753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,233,961, 833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521,466, 338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546,491, 363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882,699,571, 388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464,698, 387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569,258, 203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007,930, 619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,201,801,746, 
435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306,251, 851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978, 667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538,483, 172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899, 588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481,170] [ns_server:debug,2014-08-19T16:50:54.427,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,418305}, tap_estimate, {replica_building,"default",654,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29941.0>, <<"replication_building_654_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.439,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,430813}, tap_estimate, {replica_building,"default",654,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25599.0>, <<"replication_building_654_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.440,ns_1@10.242.238.88:<0.14650.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25599.0>}, {'ns_1@10.242.238.89',<18124.29941.0>}]) [rebalance:info,2014-08-19T16:50:54.440,ns_1@10.242.238.88:<0.14641.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:54.441,ns_1@10.242.238.88:<0.14641.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 654 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.441,ns_1@10.242.238.88:<0.14641.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.442,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.445,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:54.445,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14662.1>) [ns_server:debug,2014-08-19T16:50:54.445,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 398) [ns_server:debug,2014-08-19T16:50:54.445,ns_1@10.242.238.88:<0.14663.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.446,ns_1@10.242.238.88:<0.14663.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:54.446,ns_1@10.242.238.88:<0.14662.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 398 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.446,ns_1@10.242.238.88:<0.14668.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 398 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.446,ns_1@10.242.238.88:<0.14669.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 398 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.450,ns_1@10.242.238.88:<0.14670.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 398 into 'ns_1@10.242.238.90' is <18125.25605.0> [ns_server:debug,2014-08-19T16:50:54.452,ns_1@10.242.238.88:<0.14670.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 398 into 'ns_1@10.242.238.89' is <18124.29946.0> [rebalance:debug,2014-08-19T16:50:54.452,ns_1@10.242.238.88:<0.14662.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 398 is <0.14670.1> [ns_server:debug,2014-08-19T16:50:54.480,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,471312}, tap_estimate, {replica_building,"default",398,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25605.0>, <<"replication_building_398_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.491,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,482275}, tap_estimate, {replica_building,"default",398,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29946.0>, <<"replication_building_398_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.491,ns_1@10.242.238.88:<0.14671.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29946.0>}, {'ns_1@10.242.238.90',<18125.25605.0>}]) [rebalance:info,2014-08-19T16:50:54.491,ns_1@10.242.238.88:<0.14662.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:54.492,ns_1@10.242.238.88:<0.14662.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 398 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.492,ns_1@10.242.238.88:<0.14662.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.493,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:54.496,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.496,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14683.1>) 
[ns_server:debug,2014-08-19T16:50:54.496,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 907) [ns_server:debug,2014-08-19T16:50:54.497,ns_1@10.242.238.88:<0.14684.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.497,ns_1@10.242.238.88:<0.14684.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:54.497,ns_1@10.242.238.88:<0.14683.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 907 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.497,ns_1@10.242.238.88:<0.14689.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 907 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.497,ns_1@10.242.238.88:<0.14690.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 907 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.501,ns_1@10.242.238.88:<0.14691.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 907 into 'ns_1@10.242.238.89' is <18124.29966.0> [views:debug,2014-08-19T16:50:54.503,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/197. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.504,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",197,active,0} [ns_server:debug,2014-08-19T16:50:54.504,ns_1@10.242.238.88:<0.14691.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 907 into 'ns_1@10.242.238.91' is <18126.27414.0> [rebalance:debug,2014-08-19T16:50:54.504,ns_1@10.242.238.88:<0.14683.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 907 is <0.14691.1> [ns_server:debug,2014-08-19T16:50:54.530,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,521718}, tap_estimate, {replica_building,"default",907,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29966.0>, <<"replication_building_907_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.541,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,532684}, tap_estimate, {replica_building,"default",907,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27414.0>, <<"replication_building_907_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:54.542,ns_1@10.242.238.88:<0.14692.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27414.0>}, {'ns_1@10.242.238.89',<18124.29966.0>}]) [rebalance:info,2014-08-19T16:50:54.542,ns_1@10.242.238.88:<0.14683.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:54.542,ns_1@10.242.238.88:<0.14683.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 907 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:50:54.543,ns_1@10.242.238.88:<0.14683.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.543,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.546,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.547,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14704.1>) [ns_server:debug,2014-08-19T16:50:54.547,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 653) [ns_server:debug,2014-08-19T16:50:54.547,ns_1@10.242.238.88:<0.14705.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.547,ns_1@10.242.238.88:<0.14705.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:54.547,ns_1@10.242.238.88:<0.14704.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 653 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.547,ns_1@10.242.238.88:<0.14710.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 653 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.547,ns_1@10.242.238.88:<0.14711.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 653 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.553,ns_1@10.242.238.88:<0.14712.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 653 into 'ns_1@10.242.238.89' is <18124.29971.0> [ns_server:debug,2014-08-19T16:50:54.555,ns_1@10.242.238.88:<0.14712.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 653 into 'ns_1@10.242.238.90' is <18125.25625.0> [rebalance:debug,2014-08-19T16:50:54.555,ns_1@10.242.238.88:<0.14704.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 653 is <0.14712.1> [ns_server:debug,2014-08-19T16:50:54.586,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,577272}, tap_estimate, {replica_building,"default",653,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29971.0>, <<"replication_building_653_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.598,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,589466}, tap_estimate, {replica_building,"default",653,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25625.0>, <<"replication_building_653_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.599,ns_1@10.242.238.88:<0.14713.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25625.0>}, {'ns_1@10.242.238.89',<18124.29971.0>}]) 
[rebalance:info,2014-08-19T16:50:54.599,ns_1@10.242.238.88:<0.14704.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:54.599,ns_1@10.242.238.88:<0.14704.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 653 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.600,ns_1@10.242.238.88:<0.14704.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.600,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.604,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:54.604,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14739.1>) [ns_server:debug,2014-08-19T16:50:54.604,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 397) [ns_server:debug,2014-08-19T16:50:54.604,ns_1@10.242.238.88:<0.14740.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.604,ns_1@10.242.238.88:<0.14740.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:54.605,ns_1@10.242.238.88:<0.14739.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 397 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.605,ns_1@10.242.238.88:<0.14745.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 397 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.605,ns_1@10.242.238.88:<0.14746.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 397 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.609,ns_1@10.242.238.88:<0.14747.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 397 into 'ns_1@10.242.238.90' is <18125.25631.0> [ns_server:debug,2014-08-19T16:50:54.611,ns_1@10.242.238.88:<0.14747.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 397 into 'ns_1@10.242.238.89' is <18124.29976.0> [rebalance:debug,2014-08-19T16:50:54.611,ns_1@10.242.238.88:<0.14739.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 397 is <0.14747.1> [ns_server:debug,2014-08-19T16:50:54.638,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,629703}, tap_estimate, {replica_building,"default",397,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25631.0>, <<"replication_building_397_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.650,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,641288}, tap_estimate, {replica_building,"default",397,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29976.0>, <<"replication_building_397_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.651,ns_1@10.242.238.88:<0.14748.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.29976.0>}, {'ns_1@10.242.238.90',<18125.25631.0>}]) [rebalance:info,2014-08-19T16:50:54.652,ns_1@10.242.238.88:<0.14739.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:54.652,ns_1@10.242.238.88:<0.14739.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 397 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.653,ns_1@10.242.238.88:<0.14739.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.653,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:54.656,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.656,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14760.1>) [ns_server:debug,2014-08-19T16:50:54.657,ns_1@10.242.238.88:<0.14761.1>:ns_single_vbucket_mover:mover_inner:141]Got nack 
for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.657,ns_1@10.242.238.88:<0.14761.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:54.658,ns_1@10.242.238.88:<0.14760.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 906 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.658,ns_1@10.242.238.88:<0.14766.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 906 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.658,ns_1@10.242.238.88:<0.14767.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 906 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.661,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 906) [ns_server:debug,2014-08-19T16:50:54.662,ns_1@10.242.238.88:<0.14768.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 906 into 'ns_1@10.242.238.89' is <18124.29982.0> [ns_server:debug,2014-08-19T16:50:54.664,ns_1@10.242.238.88:<0.14768.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 906 into 'ns_1@10.242.238.91' is <18126.27454.0> [rebalance:debug,2014-08-19T16:50:54.665,ns_1@10.242.238.88:<0.14760.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 906 is <0.14768.1> [ns_server:debug,2014-08-19T16:50:54.670,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 195. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.670,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/195. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.672,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,770,715,587,404,276,221,949,821,766,638,455,327, 144,872,689,561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663, 535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688, 560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273, 218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609, 426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242,114, 970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475, 347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812,757, 629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525,470, 342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783, 728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882,699, 571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464, 698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569, 258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007, 930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,201,801, 746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306, 251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744,433,122, 
978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538, 483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354, 899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481,170] [ns_server:debug,2014-08-19T16:50:54.674,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",195,active,0} [ns_server:debug,2014-08-19T16:50:54.691,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,682093}, tap_estimate, {replica_building,"default",906,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.29982.0>, <<"replication_building_906_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.704,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,695354}, tap_estimate, {replica_building,"default",906,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27454.0>, <<"replication_building_906_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:54.704,ns_1@10.242.238.88:<0.14769.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27454.0>}, {'ns_1@10.242.238.89',<18124.29982.0>}]) [rebalance:info,2014-08-19T16:50:54.704,ns_1@10.242.238.88:<0.14760.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:54.705,ns_1@10.242.238.88:<0.14760.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 906 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.705,ns_1@10.242.238.88:<0.14760.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.706,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.709,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.709,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14781.1>) [ns_server:debug,2014-08-19T16:50:54.709,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 652) [ns_server:debug,2014-08-19T16:50:54.709,ns_1@10.242.238.88:<0.14782.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.710,ns_1@10.242.238.88:<0.14782.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:54.710,ns_1@10.242.238.88:<0.14781.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 652 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.710,ns_1@10.242.238.88:<0.14787.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 652 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.710,ns_1@10.242.238.88:<0.14788.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 652 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.714,ns_1@10.242.238.88:<0.14789.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 652 into 'ns_1@10.242.238.89' is <18124.30001.0> [ns_server:debug,2014-08-19T16:50:54.716,ns_1@10.242.238.88:<0.14789.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 652 into 'ns_1@10.242.238.90' is <18125.25650.0> [rebalance:debug,2014-08-19T16:50:54.716,ns_1@10.242.238.88:<0.14781.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 652 is <0.14789.1> [ns_server:debug,2014-08-19T16:50:54.742,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,733525}, tap_estimate, {replica_building,"default",652,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30001.0>, <<"replication_building_652_'ns_1@10.242.238.89'">>} [views:debug,2014-08-19T16:50:54.746,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/195. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.746,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",195,active,0} [ns_server:debug,2014-08-19T16:50:54.753,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,744909}, tap_estimate, {replica_building,"default",652,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25650.0>, <<"replication_building_652_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.754,ns_1@10.242.238.88:<0.14790.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25650.0>}, {'ns_1@10.242.238.89',<18124.30001.0>}]) [rebalance:info,2014-08-19T16:50:54.754,ns_1@10.242.238.88:<0.14781.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:54.755,ns_1@10.242.238.88:<0.14781.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 652 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.755,ns_1@10.242.238.88:<0.14781.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.756,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.759,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:54.759,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14802.1>) [ns_server:debug,2014-08-19T16:50:54.759,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 396) [ns_server:debug,2014-08-19T16:50:54.760,ns_1@10.242.238.88:<0.14803.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.760,ns_1@10.242.238.88:<0.14803.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:54.760,ns_1@10.242.238.88:<0.14802.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 396 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.760,ns_1@10.242.238.88:<0.14808.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 396 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.760,ns_1@10.242.238.88:<0.14809.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 396 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.764,ns_1@10.242.238.88:<0.14810.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 396 into 'ns_1@10.242.238.90' is <18125.25656.0> [ns_server:debug,2014-08-19T16:50:54.766,ns_1@10.242.238.88:<0.14810.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 396 into 'ns_1@10.242.238.89' is <18124.30006.0> [rebalance:debug,2014-08-19T16:50:54.766,ns_1@10.242.238.88:<0.14802.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 396 is <0.14810.1> [ns_server:debug,2014-08-19T16:50:54.792,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,783106}, tap_estimate, {replica_building,"default",396,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25656.0>, <<"replication_building_396_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.807,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,798001}, tap_estimate, {replica_building,"default",396,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30006.0>, <<"replication_building_396_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.807,ns_1@10.242.238.88:<0.14811.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30006.0>}, {'ns_1@10.242.238.90',<18125.25656.0>}]) [rebalance:info,2014-08-19T16:50:54.807,ns_1@10.242.238.88:<0.14802.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:54.808,ns_1@10.242.238.88:<0.14802.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 396 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.808,ns_1@10.242.238.88:<0.14802.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:54.809,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:54.812,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.812,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14823.1>) [ns_server:debug,2014-08-19T16:50:54.812,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 905) [ns_server:debug,2014-08-19T16:50:54.813,ns_1@10.242.238.88:<0.14824.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.813,ns_1@10.242.238.88:<0.14824.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:54.813,ns_1@10.242.238.88:<0.14823.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 905 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.813,ns_1@10.242.238.88:<0.14829.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 905 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.813,ns_1@10.242.238.88:<0.14830.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 905 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.817,ns_1@10.242.238.88:<0.14831.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 905 into 'ns_1@10.242.238.89' is <18124.30012.0> [ns_server:debug,2014-08-19T16:50:54.820,ns_1@10.242.238.88:<0.14831.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 905 into 'ns_1@10.242.238.91' is <18126.27474.0> [rebalance:debug,2014-08-19T16:50:54.820,ns_1@10.242.238.88:<0.14823.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 905 is <0.14831.1> [ns_server:debug,2014-08-19T16:50:54.845,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,836929}, tap_estimate, {replica_building,"default",905,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30012.0>, <<"replication_building_905_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.859,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,850662}, tap_estimate, {replica_building,"default",905,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27474.0>, <<"replication_building_905_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:54.860,ns_1@10.242.238.88:<0.14840.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27474.0>}, {'ns_1@10.242.238.89',<18124.30012.0>}]) [rebalance:info,2014-08-19T16:50:54.860,ns_1@10.242.238.88:<0.14823.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:54.861,ns_1@10.242.238.88:<0.14823.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 905 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.861,ns_1@10.242.238.88:<0.14823.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.862,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.865,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:54.865,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14858.1>) [ns_server:debug,2014-08-19T16:50:54.865,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 651) [ns_server:debug,2014-08-19T16:50:54.866,ns_1@10.242.238.88:<0.14859.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.866,ns_1@10.242.238.88:<0.14859.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:54.866,ns_1@10.242.238.88:<0.14858.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 651 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.866,ns_1@10.242.238.88:<0.14864.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 651 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.866,ns_1@10.242.238.88:<0.14865.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 651 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.870,ns_1@10.242.238.88:<0.14866.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 651 into 'ns_1@10.242.238.89' is <18124.30025.0> [ns_server:debug,2014-08-19T16:50:54.873,ns_1@10.242.238.88:<0.14866.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 651 into 'ns_1@10.242.238.90' is <18125.25675.0> [rebalance:debug,2014-08-19T16:50:54.873,ns_1@10.242.238.88:<0.14858.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 651 is <0.14866.1> [ns_server:debug,2014-08-19T16:50:54.888,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 193. Nacking mccouch update. [views:debug,2014-08-19T16:50:54.888,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/193. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.888,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",193,active,0} [ns_server:debug,2014-08-19T16:50:54.890,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,770,715,587,404,276,221,949,821,766,638,455,327, 144,872,689,561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663, 535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688, 560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401, 273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,557,502,374,919,791,736,608,425,297,242, 114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450, 322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530, 475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866, 683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891, 708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188, 916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268, 213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812, 757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965, 837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882, 699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830,519, 464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880, 569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385, 1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,201, 
801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617, 306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744,433, 122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849, 538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665, 354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481,170] [ns_server:debug,2014-08-19T16:50:54.899,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,890963}, tap_estimate, {replica_building,"default",651,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30025.0>, <<"replication_building_651_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.912,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,903804}, tap_estimate, {replica_building,"default",651,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25675.0>, <<"replication_building_651_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:54.913,ns_1@10.242.238.88:<0.14867.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25675.0>}, {'ns_1@10.242.238.89',<18124.30025.0>}]) [rebalance:info,2014-08-19T16:50:54.913,ns_1@10.242.238.88:<0.14858.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:54.914,ns_1@10.242.238.88:<0.14858.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 651 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.914,ns_1@10.242.238.88:<0.14858.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.915,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:54.918,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:54.918,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14879.1>) [ns_server:debug,2014-08-19T16:50:54.918,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 395) [ns_server:debug,2014-08-19T16:50:54.919,ns_1@10.242.238.88:<0.14880.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.919,ns_1@10.242.238.88:<0.14880.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:54.919,ns_1@10.242.238.88:<0.14879.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 395 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.919,ns_1@10.242.238.88:<0.14885.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 395 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.919,ns_1@10.242.238.88:<0.14886.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 395 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.923,ns_1@10.242.238.88:<0.14887.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 395 into 'ns_1@10.242.238.90' is <18125.25681.0> [ns_server:debug,2014-08-19T16:50:54.925,ns_1@10.242.238.88:<0.14887.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 395 into 'ns_1@10.242.238.89' is <18124.30044.0> [rebalance:debug,2014-08-19T16:50:54.925,ns_1@10.242.238.88:<0.14879.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 395 is <0.14887.1> [ns_server:debug,2014-08-19T16:50:54.951,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,942555}, tap_estimate, {replica_building,"default",395,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25681.0>, <<"replication_building_395_'ns_1@10.242.238.90'">>} [views:debug,2014-08-19T16:50:54.955,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/193. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:54.955,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",193,active,0} [ns_server:debug,2014-08-19T16:50:54.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,958334}, tap_estimate, {replica_building,"default",395,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30044.0>, <<"replication_building_395_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:54.967,ns_1@10.242.238.88:<0.14888.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30044.0>}, {'ns_1@10.242.238.90',<18125.25681.0>}]) [rebalance:info,2014-08-19T16:50:54.967,ns_1@10.242.238.88:<0.14879.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:54.969,ns_1@10.242.238.88:<0.14879.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 395 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:54.969,ns_1@10.242.238.88:<0.14879.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:54.970,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:54.973,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:54.973,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14900.1>) [ns_server:debug,2014-08-19T16:50:54.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 904) [ns_server:debug,2014-08-19T16:50:54.974,ns_1@10.242.238.88:<0.14901.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:54.974,ns_1@10.242.238.88:<0.14901.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:54.974,ns_1@10.242.238.88:<0.14900.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 904 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:54.974,ns_1@10.242.238.88:<0.14906.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 904 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:54.974,ns_1@10.242.238.88:<0.14907.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 904 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:54.979,ns_1@10.242.238.88:<0.14908.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 904 into 'ns_1@10.242.238.89' is <18124.30053.0> [ns_server:debug,2014-08-19T16:50:54.981,ns_1@10.242.238.88:<0.14908.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 904 into 'ns_1@10.242.238.91' is <18126.27494.0> [rebalance:debug,2014-08-19T16:50:54.982,ns_1@10.242.238.88:<0.14900.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 904 is <0.14908.1> [ns_server:debug,2014-08-19T16:50:55.007,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452654,998612}, tap_estimate, {replica_building,"default",904,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30053.0>, <<"replication_building_904_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.019,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,10813}, tap_estimate, {replica_building,"default",904,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27494.0>, <<"replication_building_904_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:55.020,ns_1@10.242.238.88:<0.14909.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27494.0>}, {'ns_1@10.242.238.89',<18124.30053.0>}]) [rebalance:info,2014-08-19T16:50:55.020,ns_1@10.242.238.88:<0.14900.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:55.020,ns_1@10.242.238.88:<0.14900.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 904 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.021,ns_1@10.242.238.88:<0.14900.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:55.021,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.025,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.025,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.14935.1>) [ns_server:debug,2014-08-19T16:50:55.025,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 650) [ns_server:debug,2014-08-19T16:50:55.025,ns_1@10.242.238.88:<0.14936.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.025,ns_1@10.242.238.88:<0.14936.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:55.026,ns_1@10.242.238.88:<0.14935.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 650 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.026,ns_1@10.242.238.88:<0.14941.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 650 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.026,ns_1@10.242.238.88:<0.14942.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 650 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.030,ns_1@10.242.238.88:<0.14943.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 650 into 'ns_1@10.242.238.89' is <18124.30058.0> [ns_server:debug,2014-08-19T16:50:55.032,ns_1@10.242.238.88:<0.14943.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 650 into 'ns_1@10.242.238.90' is <18125.25686.0> [rebalance:debug,2014-08-19T16:50:55.032,ns_1@10.242.238.88:<0.14935.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 650 is <0.14943.1> [ns_server:debug,2014-08-19T16:50:55.053,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 191. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.053,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/191. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.054,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",191,active,0} [ns_server:debug,2014-08-19T16:50:55.057,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,770,715,587,404,276,221,949,821,766,638,455,327, 144,872,689,561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663, 535,480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688, 560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401, 273,218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297, 242,114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633, 450,322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658, 530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994, 866,683,555,500,372,917,789,734,606,423,295,240,112,968,840,657,529,474,346, 891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371, 188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940, 812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237, 965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445, 317,134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154, 882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830, 519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335, 880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696, 385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256, 
201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928, 617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744, 433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249, 849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976, 665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481, 170] [ns_server:debug,2014-08-19T16:50:55.058,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,49270}, tap_estimate, {replica_building,"default",650,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30058.0>, <<"replication_building_650_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.072,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,63570}, tap_estimate, {replica_building,"default",650,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25686.0>, <<"replication_building_650_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.073,ns_1@10.242.238.88:<0.14944.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25686.0>}, {'ns_1@10.242.238.89',<18124.30058.0>}]) [rebalance:info,2014-08-19T16:50:55.073,ns_1@10.242.238.88:<0.14935.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:55.073,ns_1@10.242.238.88:<0.14935.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 650 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.074,ns_1@10.242.238.88:<0.14935.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.074,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.078,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:55.078,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.14956.1>) [ns_server:debug,2014-08-19T16:50:55.078,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 394) [ns_server:debug,2014-08-19T16:50:55.078,ns_1@10.242.238.88:<0.14957.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.079,ns_1@10.242.238.88:<0.14957.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:55.079,ns_1@10.242.238.88:<0.14956.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 394 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.079,ns_1@10.242.238.88:<0.14962.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 394 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.079,ns_1@10.242.238.88:<0.14963.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 394 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.083,ns_1@10.242.238.88:<0.14964.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 394 into 'ns_1@10.242.238.90' is <18125.25706.0> [ns_server:debug,2014-08-19T16:50:55.086,ns_1@10.242.238.88:<0.14964.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 394 into 'ns_1@10.242.238.89' is <18124.30063.0> [rebalance:debug,2014-08-19T16:50:55.086,ns_1@10.242.238.88:<0.14956.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 394 is <0.14964.1> [views:debug,2014-08-19T16:50:55.087,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/191. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.087,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",191,active,0} [ns_server:debug,2014-08-19T16:50:55.119,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,110072}, tap_estimate, {replica_building,"default",394,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25706.0>, <<"replication_building_394_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.126,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,117481}, tap_estimate, {replica_building,"default",394,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30063.0>, <<"replication_building_394_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.126,ns_1@10.242.238.88:<0.14965.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30063.0>}, {'ns_1@10.242.238.90',<18125.25706.0>}]) [rebalance:info,2014-08-19T16:50:55.127,ns_1@10.242.238.88:<0.14956.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:55.127,ns_1@10.242.238.88:<0.14956.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 394 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.128,ns_1@10.242.238.88:<0.14956.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.128,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:55.132,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:55.133,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.14991.1>) [ns_server:debug,2014-08-19T16:50:55.133,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 903) [ns_server:debug,2014-08-19T16:50:55.133,ns_1@10.242.238.88:<0.14992.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.133,ns_1@10.242.238.88:<0.14992.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:55.133,ns_1@10.242.238.88:<0.14991.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 903 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.134,ns_1@10.242.238.88:<0.14997.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 903 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.134,ns_1@10.242.238.88:<0.14998.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 903 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.138,ns_1@10.242.238.88:<0.14999.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 903 into 'ns_1@10.242.238.89' is <18124.30069.0> [ns_server:debug,2014-08-19T16:50:55.141,ns_1@10.242.238.88:<0.14999.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 903 into 'ns_1@10.242.238.91' is <18126.27514.0> [rebalance:debug,2014-08-19T16:50:55.141,ns_1@10.242.238.88:<0.14991.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 903 is <0.14999.1> [ns_server:debug,2014-08-19T16:50:55.163,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 189. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.163,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/189. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.163,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",189,active,0} [ns_server:debug,2014-08-19T16:50:55.165,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,587,276,221,949,821,766,638,455,327,144,872,689, 561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585, 402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812,757, 629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525,470, 342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911,783, 728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649,521, 466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674,546, 491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882,699, 571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830,519,464, 698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880,569, 258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385,1007, 930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,201,801, 
746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617,306, 251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744,433,122, 978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849,538, 483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354, 899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481,170,770, 715,404] [ns_server:debug,2014-08-19T16:50:55.168,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,159213}, tap_estimate, {replica_building,"default",903,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30069.0>, <<"replication_building_903_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.185,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,176459}, tap_estimate, {replica_building,"default",903,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27514.0>, <<"replication_building_903_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:55.185,ns_1@10.242.238.88:<0.15000.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27514.0>}, {'ns_1@10.242.238.89',<18124.30069.0>}]) [rebalance:info,2014-08-19T16:50:55.186,ns_1@10.242.238.88:<0.14991.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:55.186,ns_1@10.242.238.88:<0.14991.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 903 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.187,ns_1@10.242.238.88:<0.14991.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.187,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.190,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.190,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15017.1>) [ns_server:debug,2014-08-19T16:50:55.190,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 649) [ns_server:debug,2014-08-19T16:50:55.191,ns_1@10.242.238.88:<0.15018.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.191,ns_1@10.242.238.88:<0.15018.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:55.191,ns_1@10.242.238.88:<0.15017.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 649 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.191,ns_1@10.242.238.88:<0.15023.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 649 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.191,ns_1@10.242.238.88:<0.15024.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 649 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:50:55.197,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/189. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.197,ns_1@10.242.238.88:<0.15025.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 649 into 'ns_1@10.242.238.89' is <18124.30088.0> [ns_server:debug,2014-08-19T16:50:55.197,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",189,active,0} [ns_server:debug,2014-08-19T16:50:55.199,ns_1@10.242.238.88:<0.15025.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 649 into 'ns_1@10.242.238.90' is <18125.25711.0> [rebalance:debug,2014-08-19T16:50:55.199,ns_1@10.242.238.88:<0.15017.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 649 is <0.15025.1> [ns_server:debug,2014-08-19T16:50:55.225,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,216707}, tap_estimate, {replica_building,"default",649,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30088.0>, <<"replication_building_649_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.243,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,234530}, tap_estimate, {replica_building,"default",649,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25711.0>, <<"replication_building_649_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.243,ns_1@10.242.238.88:<0.15026.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25711.0>}, {'ns_1@10.242.238.89',<18124.30088.0>}]) [rebalance:info,2014-08-19T16:50:55.244,ns_1@10.242.238.88:<0.15017.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:55.244,ns_1@10.242.238.88:<0.15017.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 649 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.245,ns_1@10.242.238.88:<0.15017.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.245,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.249,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:50:55.249,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15052.1>) [ns_server:debug,2014-08-19T16:50:55.249,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 393) [ns_server:debug,2014-08-19T16:50:55.249,ns_1@10.242.238.88:<0.15053.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.249,ns_1@10.242.238.88:<0.15053.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:55.250,ns_1@10.242.238.88:<0.15052.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 393 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.250,ns_1@10.242.238.88:<0.15058.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 393 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.250,ns_1@10.242.238.88:<0.15059.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 393 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.253,ns_1@10.242.238.88:<0.15060.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 393 into 'ns_1@10.242.238.90' is <18125.25717.0> [ns_server:debug,2014-08-19T16:50:55.256,ns_1@10.242.238.88:<0.15060.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 393 into 'ns_1@10.242.238.89' is <18124.30093.0> [rebalance:debug,2014-08-19T16:50:55.256,ns_1@10.242.238.88:<0.15052.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 393 is <0.15060.1> [ns_server:debug,2014-08-19T16:50:55.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 187. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.280,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/187. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",187,active,0} [ns_server:debug,2014-08-19T16:50:55.282,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,273779}, tap_estimate, {replica_building,"default",393,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25717.0>, <<"replication_building_393_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.282,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,587,276,221,949,821,766,638,455,327,144,872,689, 561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585, 402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812, 757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965, 837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832,649, 521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857,674, 546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154,882, 
699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830,519, 464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335,880, 569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696,385, 1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256,201, 801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928,617, 306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744,433, 122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249,849, 538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665, 354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481,170, 770,715,404] [ns_server:debug,2014-08-19T16:50:55.294,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,285684}, tap_estimate, {replica_building,"default",393,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30093.0>, <<"replication_building_393_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.295,ns_1@10.242.238.88:<0.15061.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30093.0>}, {'ns_1@10.242.238.90',<18125.25717.0>}]) [rebalance:info,2014-08-19T16:50:55.295,ns_1@10.242.238.88:<0.15052.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:55.295,ns_1@10.242.238.88:<0.15052.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 393 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.296,ns_1@10.242.238.88:<0.15052.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.296,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:55.299,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.300,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.15073.1>) [ns_server:debug,2014-08-19T16:50:55.300,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 902) [ns_server:debug,2014-08-19T16:50:55.300,ns_1@10.242.238.88:<0.15074.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.300,ns_1@10.242.238.88:<0.15074.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:55.300,ns_1@10.242.238.88:<0.15073.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 902 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.300,ns_1@10.242.238.88:<0.15079.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 902 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.300,ns_1@10.242.238.88:<0.15080.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 902 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.304,ns_1@10.242.238.88:<0.15081.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 902 into 'ns_1@10.242.238.89' is <18124.30099.0> [ns_server:debug,2014-08-19T16:50:55.307,ns_1@10.242.238.88:<0.15081.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 902 into 'ns_1@10.242.238.91' is <18126.27520.0> [rebalance:debug,2014-08-19T16:50:55.307,ns_1@10.242.238.88:<0.15073.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 902 is <0.15081.1> [views:debug,2014-08-19T16:50:55.322,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/187. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.322,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",187,active,0} [ns_server:debug,2014-08-19T16:50:55.334,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,325021}, tap_estimate, {replica_building,"default",902,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30099.0>, <<"replication_building_902_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.347,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,338774}, tap_estimate, {replica_building,"default",902,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27520.0>, <<"replication_building_902_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:55.348,ns_1@10.242.238.88:<0.15082.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27520.0>}, {'ns_1@10.242.238.89',<18124.30099.0>}]) [rebalance:info,2014-08-19T16:50:55.348,ns_1@10.242.238.88:<0.15073.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:55.348,ns_1@10.242.238.88:<0.15073.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 902 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.349,ns_1@10.242.238.88:<0.15073.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.350,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.353,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] 
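[editor's note] The entries above repeat a fixed per-vbucket pattern: a "Spawned single vbucket mover" line, two "Doing vbucket N state change" lines, two "Replica building ebucketmigrator" lines, and then a pair of "Seeing tap_estimate" records once each destination node reports its backfill estimate. As a minimal, hypothetical reading aid (not part of Couchbase/ns_server) the Python sketch below pulls those tap_estimate records out of a log dump and groups them by vbucket; it relies only on the {replica_building,"<bucket>",<vb>,'<src>','<dst>'} tuple that is visible in the entries above, and the file name is an assumption.

# Hypothetical helper for reading the "Seeing tap_estimate" entries above;
# not part of Couchbase/ns_server.
import re
from collections import defaultdict

# Matches the {replica_building,"<bucket>",<vb>,'<src>','<dst>'} tuple printed
# inside every "Seeing tap_estimate:" entry; \s* tolerates line wrapping.
REPLICA_BUILDING = re.compile(
    r"\{replica_building,\"(?P<bucket>[^\"]+)\","
    r"\s*(?P<vb>\d+),\s*'(?P<src>[^']+)',\s*'(?P<dst>[^']+)'\}"
)

def builders_by_vbucket(log_text):
    """Group replica-building tap_estimate records by vbucket id."""
    by_vb = defaultdict(list)
    for m in REPLICA_BUILDING.finditer(log_text):
        by_vb[int(m.group("vb"))].append((m.group("src"), m.group("dst")))
    return dict(by_vb)

if __name__ == "__main__":
    # "ns_server.debug.log" is an assumed file name for a dump like this one.
    with open("ns_server.debug.log") as f:
        for vb, pairs in sorted(builders_by_vbucket(f.read()).items()):
            print(vb, pairs)

Run over this excerpt it would report two builder records per moving vbucket (393, 902, 648, ...), one for each destination replica node.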
[rebalance:debug,2014-08-19T16:50:55.353,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15094.1>) [ns_server:debug,2014-08-19T16:50:55.353,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 648) [ns_server:debug,2014-08-19T16:50:55.353,ns_1@10.242.238.88:<0.15095.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.354,ns_1@10.242.238.88:<0.15095.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:55.354,ns_1@10.242.238.88:<0.15094.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 648 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.354,ns_1@10.242.238.88:<0.15100.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 648 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.354,ns_1@10.242.238.88:<0.15101.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 648 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.358,ns_1@10.242.238.88:<0.15102.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 648 into 'ns_1@10.242.238.89' is <18124.30104.0> [ns_server:debug,2014-08-19T16:50:55.361,ns_1@10.242.238.88:<0.15102.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 648 into 'ns_1@10.242.238.90' is <18125.25736.0> [rebalance:debug,2014-08-19T16:50:55.361,ns_1@10.242.238.88:<0.15094.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 648 is <0.15102.1> [ns_server:debug,2014-08-19T16:50:55.387,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,378275}, tap_estimate, {replica_building,"default",648,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30104.0>, <<"replication_building_648_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.398,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 185. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.398,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/185. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.398,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",185,active,0} [ns_server:debug,2014-08-19T16:50:55.400,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,391842}, tap_estimate, {replica_building,"default",648,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25736.0>, <<"replication_building_648_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.400,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,587,276,221,949,821,766,638,455,327,144,872,689, 561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585, 402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812, 757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965, 837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154, 
882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830, 519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335, 880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696, 385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256, 201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928, 617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744, 433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249, 849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976, 665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481, 170,770,715,404] [ns_server:debug,2014-08-19T16:50:55.401,ns_1@10.242.238.88:<0.15108.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25736.0>}, {'ns_1@10.242.238.89',<18124.30104.0>}]) [rebalance:info,2014-08-19T16:50:55.401,ns_1@10.242.238.88:<0.15094.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:55.402,ns_1@10.242.238.88:<0.15094.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 648 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.402,ns_1@10.242.238.88:<0.15094.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.403,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.406,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:55.406,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15129.1>) [ns_server:debug,2014-08-19T16:50:55.406,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 392) [ns_server:debug,2014-08-19T16:50:55.406,ns_1@10.242.238.88:<0.15130.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.407,ns_1@10.242.238.88:<0.15130.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:55.407,ns_1@10.242.238.88:<0.15129.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 392 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.407,ns_1@10.242.238.88:<0.15135.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 392 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.407,ns_1@10.242.238.88:<0.15136.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 392 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.411,ns_1@10.242.238.88:<0.15137.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 392 into 'ns_1@10.242.238.90' is <18125.25742.0> [ns_server:debug,2014-08-19T16:50:55.413,ns_1@10.242.238.88:<0.15137.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 392 into 'ns_1@10.242.238.89' is <18124.30123.0> [rebalance:debug,2014-08-19T16:50:55.413,ns_1@10.242.238.88:<0.15129.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 392 is <0.15137.1> [views:debug,2014-08-19T16:50:55.431,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/185. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.432,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",185,active,0} [ns_server:debug,2014-08-19T16:50:55.439,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,430302}, tap_estimate, {replica_building,"default",392,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25742.0>, <<"replication_building_392_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.452,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,443440}, tap_estimate, {replica_building,"default",392,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30123.0>, <<"replication_building_392_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.452,ns_1@10.242.238.88:<0.15138.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30123.0>}, {'ns_1@10.242.238.90',<18125.25742.0>}]) [rebalance:info,2014-08-19T16:50:55.453,ns_1@10.242.238.88:<0.15129.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:55.453,ns_1@10.242.238.88:<0.15129.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 392 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.454,ns_1@10.242.238.88:<0.15129.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.454,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:55.457,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:55.457,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.15150.1>) [ns_server:debug,2014-08-19T16:50:55.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 901) [ns_server:debug,2014-08-19T16:50:55.458,ns_1@10.242.238.88:<0.15151.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.458,ns_1@10.242.238.88:<0.15151.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:55.458,ns_1@10.242.238.88:<0.15150.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 901 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.458,ns_1@10.242.238.88:<0.15156.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 901 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.458,ns_1@10.242.238.88:<0.15157.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 901 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.462,ns_1@10.242.238.88:<0.15158.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 901 into 'ns_1@10.242.238.89' is <18124.30129.0> [ns_server:debug,2014-08-19T16:50:55.464,ns_1@10.242.238.88:<0.15158.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 901 into 'ns_1@10.242.238.91' is <18126.27540.0> [rebalance:debug,2014-08-19T16:50:55.464,ns_1@10.242.238.88:<0.15150.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 901 is <0.15158.1> [ns_server:debug,2014-08-19T16:50:55.491,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,482165}, tap_estimate, {replica_building,"default",901,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30129.0>, <<"replication_building_901_'ns_1@10.242.238.89'">>} [ns_server:info,2014-08-19T16:50:55.492,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_pull:341]Pulling config from: 'ns_1@10.242.238.89' [ns_server:debug,2014-08-19T16:50:55.504,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,495357}, tap_estimate, {replica_building,"default",901,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27540.0>, <<"replication_building_901_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:55.504,ns_1@10.242.238.88:<0.15159.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27540.0>}, {'ns_1@10.242.238.89',<18124.30129.0>}]) [rebalance:info,2014-08-19T16:50:55.505,ns_1@10.242.238.88:<0.15150.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:55.505,ns_1@10.242.238.88:<0.15150.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 901 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:50:55.506,ns_1@10.242.238.88:<0.15150.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.506,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.509,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.510,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15188.1>) [ns_server:debug,2014-08-19T16:50:55.510,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 647) [ns_server:debug,2014-08-19T16:50:55.510,ns_1@10.242.238.88:<0.15189.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.510,ns_1@10.242.238.88:<0.15189.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:55.510,ns_1@10.242.238.88:<0.15188.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 647 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.510,ns_1@10.242.238.88:<0.15194.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 647 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.510,ns_1@10.242.238.88:<0.15195.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 647 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.514,ns_1@10.242.238.88:<0.15196.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 647 into 'ns_1@10.242.238.89' is <18124.30134.0> [ns_server:debug,2014-08-19T16:50:55.516,ns_1@10.242.238.88:<0.15196.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 647 into 'ns_1@10.242.238.90' is <18125.25769.0> [rebalance:debug,2014-08-19T16:50:55.516,ns_1@10.242.238.88:<0.15188.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 647 is <0.15196.1> [ns_server:debug,2014-08-19T16:50:55.521,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 183. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.522,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/183. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.522,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",183,active,0} [ns_server:debug,2014-08-19T16:50:55.524,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,587,276,221,949,821,766,638,455,327,144,872,689, 561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585, 402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812, 757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965, 837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462, 696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567, 
256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005, 928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799, 744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304, 249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120, 976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536, 481,170,770,715,404] [ns_server:debug,2014-08-19T16:50:55.546,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,537730}, tap_estimate, {replica_building,"default",647,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30134.0>, <<"replication_building_647_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.556,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,546987}, tap_estimate, {replica_building,"default",647,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25769.0>, <<"replication_building_647_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.556,ns_1@10.242.238.88:<0.15197.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25769.0>}, {'ns_1@10.242.238.89',<18124.30134.0>}]) [rebalance:info,2014-08-19T16:50:55.556,ns_1@10.242.238.88:<0.15188.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:55.557,ns_1@10.242.238.88:<0.15188.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 647 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.557,ns_1@10.242.238.88:<0.15188.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.558,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.561,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:55.561,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15209.1>) [ns_server:debug,2014-08-19T16:50:55.561,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 391) [ns_server:debug,2014-08-19T16:50:55.561,ns_1@10.242.238.88:<0.15210.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.561,ns_1@10.242.238.88:<0.15210.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:55.562,ns_1@10.242.238.88:<0.15209.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 391 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.562,ns_1@10.242.238.88:<0.15215.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 391 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.562,ns_1@10.242.238.88:<0.15216.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 391 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.565,ns_1@10.242.238.88:<0.15217.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 391 into 'ns_1@10.242.238.90' is <18125.25775.0> [ns_server:debug,2014-08-19T16:50:55.567,ns_1@10.242.238.88:<0.15217.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 391 into 'ns_1@10.242.238.89' is <18124.30153.0> [rebalance:debug,2014-08-19T16:50:55.567,ns_1@10.242.238.88:<0.15209.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 391 is <0.15217.1> [views:debug,2014-08-19T16:50:55.589,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/183. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",183,active,0} [ns_server:debug,2014-08-19T16:50:55.595,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,586352}, tap_estimate, {replica_building,"default",391,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25775.0>, <<"replication_building_391_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.606,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,597869}, tap_estimate, {replica_building,"default",391,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30153.0>, <<"replication_building_391_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.607,ns_1@10.242.238.88:<0.15218.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30153.0>}, {'ns_1@10.242.238.90',<18125.25775.0>}]) [rebalance:info,2014-08-19T16:50:55.607,ns_1@10.242.238.88:<0.15209.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:55.608,ns_1@10.242.238.88:<0.15209.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 391 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.608,ns_1@10.242.238.88:<0.15209.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.609,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:55.612,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
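[editor's note] Each move in this stream runs from a "Noted vbucket move start (vbucket N)" entry through state changes, replica builders and tap estimates to a "noted backfill done: {move,{N,...}}" note. A hypothetical timing aid (again not ns_server code) can pair those two markers by vbucket using the timestamp in each entry header; the sketch below assumes the header layout shown above, [ns_server:debug,<timestamp>,<node>:<pid>:<module>:<function>:<line>].

# Hypothetical timing aid: seconds from "move start" to the backfill-done note.
import re
from datetime import datetime

TS = r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3})"
START = re.compile(r"\[ns_server:debug," + TS +
                   r"[^\]]*\]Noted vbucket move start \(vbucket (?P<vb>\d+)\)")
DONE = re.compile(r"\[ns_server:debug," + TS +
                  r"[^\]]*\]noted backfill done: \{move,\{(?P<vb>\d+),")

def backfill_durations(log_text):
    """Map vbucket id -> seconds between its move-start and backfill-done notes."""
    def parse(ts):
        return datetime.strptime(ts, "%Y-%m-%dT%H:%M:%S.%f")
    started, durations = {}, {}
    for m in START.finditer(log_text):
        started[int(m.group("vb"))] = parse(m.group("ts"))
    for m in DONE.finditer(log_text):
        vb = int(m.group("vb"))
        if vb in started:
            durations[vb] = (parse(m.group("ts")) - started[vb]).total_seconds()
    return durations

In this excerpt the gap is on the order of 50 ms per vbucket; for example, vbucket 391's move start (16:50:55.561) and its backfill-done note (16:50:55.609) are 48 ms apart.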
[rebalance:debug,2014-08-19T16:50:55.612,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.15230.1>) [ns_server:debug,2014-08-19T16:50:55.612,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 900) [ns_server:debug,2014-08-19T16:50:55.613,ns_1@10.242.238.88:<0.15231.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.613,ns_1@10.242.238.88:<0.15231.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:55.613,ns_1@10.242.238.88:<0.15230.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 900 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.613,ns_1@10.242.238.88:<0.15236.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 900 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.613,ns_1@10.242.238.88:<0.15237.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 900 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.617,ns_1@10.242.238.88:<0.15238.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 900 into 'ns_1@10.242.238.89' is <18124.30159.0> [ns_server:debug,2014-08-19T16:50:55.619,ns_1@10.242.238.88:<0.15238.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 900 into 'ns_1@10.242.238.91' is <18126.27560.0> [rebalance:debug,2014-08-19T16:50:55.619,ns_1@10.242.238.88:<0.15230.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 900 is <0.15238.1> [ns_server:debug,2014-08-19T16:50:55.646,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,637553}, tap_estimate, {replica_building,"default",900,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30159.0>, <<"replication_building_900_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.664,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,655519}, tap_estimate, {replica_building,"default",900,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27560.0>, <<"replication_building_900_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:55.665,ns_1@10.242.238.88:<0.15239.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27560.0>}, {'ns_1@10.242.238.89',<18124.30159.0>}]) [rebalance:info,2014-08-19T16:50:55.665,ns_1@10.242.238.88:<0.15230.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:55.665,ns_1@10.242.238.88:<0.15230.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 900 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.666,ns_1@10.242.238.88:<0.15230.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:55.667,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.670,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.670,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15251.1>) [ns_server:debug,2014-08-19T16:50:55.670,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 646) [ns_server:debug,2014-08-19T16:50:55.670,ns_1@10.242.238.88:<0.15252.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.671,ns_1@10.242.238.88:<0.15252.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:55.671,ns_1@10.242.238.88:<0.15251.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 646 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.671,ns_1@10.242.238.88:<0.15257.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 646 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.671,ns_1@10.242.238.88:<0.15258.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 646 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.676,ns_1@10.242.238.88:<0.15259.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 646 into 'ns_1@10.242.238.89' is <18124.30164.0> [ns_server:debug,2014-08-19T16:50:55.678,ns_1@10.242.238.88:<0.15259.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 646 into 'ns_1@10.242.238.90' is <18125.25780.0> [rebalance:debug,2014-08-19T16:50:55.678,ns_1@10.242.238.88:<0.15251.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 646 is <0.15259.1> [ns_server:debug,2014-08-19T16:50:55.704,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,695791}, tap_estimate, {replica_building,"default",646,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30164.0>, <<"replication_building_646_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.716,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,707686}, tap_estimate, {replica_building,"default",646,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25780.0>, <<"replication_building_646_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.717,ns_1@10.242.238.88:<0.15268.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25780.0>}, {'ns_1@10.242.238.89',<18124.30164.0>}]) [rebalance:info,2014-08-19T16:50:55.717,ns_1@10.242.238.88:<0.15251.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:55.717,ns_1@10.242.238.88:<0.15251.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 646 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.718,ns_1@10.242.238.88:<0.15251.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.718,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.721,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:55.721,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15286.1>) [ns_server:debug,2014-08-19T16:50:55.722,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 390) [ns_server:debug,2014-08-19T16:50:55.722,ns_1@10.242.238.88:<0.15287.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.722,ns_1@10.242.238.88:<0.15287.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:55.722,ns_1@10.242.238.88:<0.15286.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 390 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.723,ns_1@10.242.238.88:<0.15292.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 390 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.723,ns_1@10.242.238.88:<0.15293.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 390 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.726,ns_1@10.242.238.88:<0.15294.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 390 into 'ns_1@10.242.238.90' is <18125.25786.0> [ns_server:debug,2014-08-19T16:50:55.729,ns_1@10.242.238.88:<0.15294.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 390 into 'ns_1@10.242.238.89' is <18124.30169.0> [rebalance:debug,2014-08-19T16:50:55.729,ns_1@10.242.238.88:<0.15286.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 390 is <0.15294.1> [ns_server:debug,2014-08-19T16:50:55.755,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,746092}, tap_estimate, {replica_building,"default",390,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25786.0>, <<"replication_building_390_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.764,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 181. Nacking mccouch update. 
[views:debug,2014-08-19T16:50:55.764,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/181. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.764,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",181,active,0} [ns_server:debug,2014-08-19T16:50:55.766,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,779,724,413,958,647,336,881,570,259,204,804,749,438, 983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254,854, 543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359,904, 593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775,720, 409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225,825, 514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641,330, 875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146,691, 380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507,196, 796,741,430,975,664,353,898,587,276,221,949,821,766,638,455,327,144,872,689, 561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585, 402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812, 757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965, 837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 
646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878, 567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383, 1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199, 799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615, 304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431, 120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847, 536,481,170,770,715,404] [ns_server:debug,2014-08-19T16:50:55.767,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,758460}, tap_estimate, {replica_building,"default",390,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30169.0>, <<"replication_building_390_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.767,ns_1@10.242.238.88:<0.15295.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30169.0>}, {'ns_1@10.242.238.90',<18125.25786.0>}]) [rebalance:info,2014-08-19T16:50:55.768,ns_1@10.242.238.88:<0.15286.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:55.768,ns_1@10.242.238.88:<0.15286.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 390 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.769,ns_1@10.242.238.88:<0.15286.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.769,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:55.772,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.773,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.15307.1>) [ns_server:debug,2014-08-19T16:50:55.773,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 899) [ns_server:debug,2014-08-19T16:50:55.773,ns_1@10.242.238.88:<0.15308.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.773,ns_1@10.242.238.88:<0.15308.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:55.773,ns_1@10.242.238.88:<0.15307.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 899 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.774,ns_1@10.242.238.88:<0.15313.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 899 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.774,ns_1@10.242.238.88:<0.15314.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 899 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.777,ns_1@10.242.238.88:<0.15315.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 899 into 'ns_1@10.242.238.89' is <18124.30189.0> [ns_server:debug,2014-08-19T16:50:55.780,ns_1@10.242.238.88:<0.15315.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 899 into 'ns_1@10.242.238.91' is <18126.27580.0> [rebalance:debug,2014-08-19T16:50:55.780,ns_1@10.242.238.88:<0.15307.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 899 is <0.15315.1> [ns_server:debug,2014-08-19T16:50:55.806,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,797277}, tap_estimate, {replica_building,"default",899,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30189.0>, <<"replication_building_899_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.823,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,814600}, tap_estimate, {replica_building,"default",899,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27580.0>, <<"replication_building_899_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:55.824,ns_1@10.242.238.88:<0.15316.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27580.0>}, {'ns_1@10.242.238.89',<18124.30189.0>}]) [rebalance:info,2014-08-19T16:50:55.824,ns_1@10.242.238.88:<0.15307.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:55.824,ns_1@10.242.238.88:<0.15307.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 899 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.825,ns_1@10.242.238.88:<0.15307.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.825,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.828,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.829,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15328.1>) 
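[editor's note] The "Usable vbuckets:" dumps printed by capi_set_view_manager above are unordered lists of vbucket ids, and each dump picks up the vbucket whose _local/vbuuid document was just added (185, then 183, then 181 in this excerpt). A small hypothetical check, assuming the default 1024 vbuckets (ids 0-1023), reports which ids are still absent from a given dump:

# Hypothetical aid for the "Usable vbuckets:" dumps above; assumes the default
# 1024 vbuckets (ids 0..1023) -- adjust `total` for other configurations.
import re

def missing_vbuckets(dump, total=1024):
    """Return the sorted vbucket ids in 0..total-1 absent from one dump.

    `dump` is the bracketed list that follows "Usable vbuckets:", e.g.
    "[933,622,311, ... ,770,715,404]".
    """
    present = {int(n) for n in re.findall(r"\d+", dump)}
    return sorted(set(range(total)) - present)

Comparing consecutive dumps in this excerpt shows the usable set growing by one id at a time as each _local/vbuuid document is signaled.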
[ns_server:debug,2014-08-19T16:50:55.829,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 645) [ns_server:debug,2014-08-19T16:50:55.829,ns_1@10.242.238.88:<0.15329.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.829,ns_1@10.242.238.88:<0.15329.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:55.830,ns_1@10.242.238.88:<0.15328.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 645 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.830,ns_1@10.242.238.88:<0.15334.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 645 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.830,ns_1@10.242.238.88:<0.15335.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 645 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.835,ns_1@10.242.238.88:<0.15336.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 645 into 'ns_1@10.242.238.89' is <18124.30194.0> [ns_server:debug,2014-08-19T16:50:55.838,ns_1@10.242.238.88:<0.15336.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 645 into 'ns_1@10.242.238.90' is <18125.25805.0> [rebalance:debug,2014-08-19T16:50:55.838,ns_1@10.242.238.88:<0.15328.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 645 is <0.15336.1> [views:debug,2014-08-19T16:50:55.839,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/181. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.840,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",181,active,0} [ns_server:debug,2014-08-19T16:50:55.864,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,855673}, tap_estimate, {replica_building,"default",645,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30194.0>, <<"replication_building_645_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.877,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,868845}, tap_estimate, {replica_building,"default",645,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25805.0>, <<"replication_building_645_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.878,ns_1@10.242.238.88:<0.15337.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25805.0>}, {'ns_1@10.242.238.89',<18124.30194.0>}]) [rebalance:info,2014-08-19T16:50:55.878,ns_1@10.242.238.88:<0.15328.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:55.879,ns_1@10.242.238.88:<0.15328.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 645 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.879,ns_1@10.242.238.88:<0.15328.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.880,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:55.883,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:55.883,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15349.1>) [ns_server:debug,2014-08-19T16:50:55.883,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 389) [ns_server:debug,2014-08-19T16:50:55.883,ns_1@10.242.238.88:<0.15350.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.884,ns_1@10.242.238.88:<0.15350.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:55.884,ns_1@10.242.238.88:<0.15349.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 389 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.884,ns_1@10.242.238.88:<0.15355.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 389 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.884,ns_1@10.242.238.88:<0.15356.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 389 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.888,ns_1@10.242.238.88:<0.15357.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 389 into 'ns_1@10.242.238.90' is <18125.25825.0> [ns_server:debug,2014-08-19T16:50:55.890,ns_1@10.242.238.88:<0.15357.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 389 into 'ns_1@10.242.238.89' is <18124.30205.0> [rebalance:debug,2014-08-19T16:50:55.890,ns_1@10.242.238.88:<0.15349.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 389 is <0.15357.1> [ns_server:debug,2014-08-19T16:50:55.921,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,912126}, tap_estimate, {replica_building,"default",389,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25825.0>, <<"replication_building_389_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:55.929,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,920630}, tap_estimate, {replica_building,"default",389,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30205.0>, <<"replication_building_389_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.930,ns_1@10.242.238.88:<0.15358.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30205.0>}, {'ns_1@10.242.238.90',<18125.25825.0>}]) [rebalance:info,2014-08-19T16:50:55.930,ns_1@10.242.238.88:<0.15349.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:55.931,ns_1@10.242.238.88:<0.15349.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 389 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.931,ns_1@10.242.238.88:<0.15349.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.932,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:55.935,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.935,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.15384.1>) 
[ns_server:debug,2014-08-19T16:50:55.935,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 898) [ns_server:debug,2014-08-19T16:50:55.935,ns_1@10.242.238.88:<0.15385.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.935,ns_1@10.242.238.88:<0.15385.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:55.936,ns_1@10.242.238.88:<0.15384.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 898 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.936,ns_1@10.242.238.88:<0.15390.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 898 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.936,ns_1@10.242.238.88:<0.15391.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 898 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.939,ns_1@10.242.238.88:<0.15392.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 898 into 'ns_1@10.242.238.89' is <18124.30211.0> [ns_server:debug,2014-08-19T16:50:55.942,ns_1@10.242.238.88:<0.15392.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 898 into 'ns_1@10.242.238.91' is <18126.27601.0> [rebalance:debug,2014-08-19T16:50:55.942,ns_1@10.242.238.88:<0.15384.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 898 is <0.15392.1> [ns_server:debug,2014-08-19T16:50:55.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,960162}, tap_estimate, {replica_building,"default",898,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30211.0>, <<"replication_building_898_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:55.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452655,972133}, tap_estimate, {replica_building,"default",898,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27601.0>, <<"replication_building_898_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:55.981,ns_1@10.242.238.88:<0.15393.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27601.0>}, {'ns_1@10.242.238.89',<18124.30211.0>}]) [rebalance:info,2014-08-19T16:50:55.981,ns_1@10.242.238.88:<0.15384.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:55.982,ns_1@10.242.238.88:<0.15384.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 898 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:55.982,ns_1@10.242.238.88:<0.15384.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:55.983,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} 
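The entries above repeat one per-vbucket sequence for every move in this excerpt (645, 389, 898, ...): a bulk state change that puts the future replicas into replica/passive state, one replica-building ebucketmigrator spawned per destination node, a tap_estimate per builder, a backfill determination ("Had backfill rvs: [true,true]"), then initiate_indexing and a wait for persistence checkpoint 1. The Python below is only a reader's sketch of that order; every name in it is a hypothetical stand-in for the log call sites (janitor_agent:bulk_set_vbucket_state, ns_replicas_builder_utils:spawn_replica_builder, ns_single_vbucket_mover:wait_backfill_determination, janitor_agent:initiate_indexing), not Couchbase code.

# Reader's sketch (hypothetical, not ns_server source): replays, in order, the
# per-vbucket move steps visible in the surrounding log entries.
def move_vbucket(vbucket, old_master, new_chain):
    # Future replicas are the nodes of the new chain other than the old master.
    replicas = [n for n in new_chain if n is not None and n != old_master]
    # "Doing bulk vbucket N state change": future replicas set to replica/passive.
    print(f"bulk_set_vbucket_state vb={vbucket} -> {replicas}")
    # One replica-building ebucketmigrator is spawned per destination node.
    builders = {dst: f"<builder for {dst}>" for dst in replicas}
    for dst, pid in builders.items():
        print(f"spawn_replica_builder vb={vbucket} into {dst} is {pid}")
    # "Seeing tap_estimate" / "Had backfill rvs: [true,true]": wait until every
    # builder reports that its backfill has started.
    backfill_started = {dst: True for dst in builders}
    assert all(backfill_started.values())
    # "Doing initiate_indexing call" then "Will wait for checkpoint 1 on replicas".
    print(f"initiate_indexing vb={vbucket}")
    checkpoint_id = 1  # the checkpoint id reported for these moves in this log
    print(f"waiting for checkpoint {checkpoint_id} on {replicas}")

# Example taken from the move of vbucket 898 recorded above.
move_vbucket(898, "ns_1@10.242.238.88", ["ns_1@10.242.238.91", "ns_1@10.242.238.89"])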
[ns_server:debug,2014-08-19T16:50:55.986,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:55.986,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15405.1>) [ns_server:debug,2014-08-19T16:50:55.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 644) [ns_server:debug,2014-08-19T16:50:55.987,ns_1@10.242.238.88:<0.15406.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:55.987,ns_1@10.242.238.88:<0.15406.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:55.987,ns_1@10.242.238.88:<0.15405.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 644 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:55.987,ns_1@10.242.238.88:<0.15411.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 644 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:55.987,ns_1@10.242.238.88:<0.15412.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 644 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:55.991,ns_1@10.242.238.88:<0.15413.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 644 into 'ns_1@10.242.238.89' is <18124.30230.0> [ns_server:debug,2014-08-19T16:50:55.993,ns_1@10.242.238.88:<0.15413.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 644 into 'ns_1@10.242.238.90' is <18125.25830.0> [rebalance:debug,2014-08-19T16:50:55.993,ns_1@10.242.238.88:<0.15405.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 644 is <0.15413.1> [ns_server:debug,2014-08-19T16:50:55.998,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 179. Nacking mccouch update. [views:debug,2014-08-19T16:50:55.998,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/179. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:55.998,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",179,active,0} [ns_server:debug,2014-08-19T16:50:56.000,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670,359, 904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486,775, 720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280,225, 825,514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952,641, 330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457,146, 691,380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562,507, 196,796,741,430,975,664,353,898,587,276,221,821,766,455,144,872,689,561,506, 378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897,769, 714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922, 794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274, 219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610, 427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946,818, 763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243,971, 843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659, 531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684, 556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892, 709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372, 189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580,397, 269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812,757,629, 446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654, 526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990, 862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470, 342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911, 783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154, 882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830, 519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646,335, 880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462,696, 385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567,256, 
201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005,928, 617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799,744, 433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304,249, 849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976, 665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536,481, 170,770,715,404,949,638,327] [ns_server:debug,2014-08-19T16:50:56.026,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,16260}, tap_estimate, {replica_building,"default",644,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30230.0>, <<"replication_building_644_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.032,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,23363}, tap_estimate, {replica_building,"default",644,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25830.0>, <<"replication_building_644_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.032,ns_1@10.242.238.88:<0.15414.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25830.0>}, {'ns_1@10.242.238.89',<18124.30230.0>}]) [rebalance:info,2014-08-19T16:50:56.033,ns_1@10.242.238.88:<0.15405.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:56.033,ns_1@10.242.238.88:<0.15405.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 644 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.034,ns_1@10.242.238.88:<0.15405.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.034,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:56.037,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:56.037,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15426.1>) [ns_server:debug,2014-08-19T16:50:56.037,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 388) [ns_server:debug,2014-08-19T16:50:56.038,ns_1@10.242.238.88:<0.15427.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.038,ns_1@10.242.238.88:<0.15427.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:56.038,ns_1@10.242.238.88:<0.15426.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 388 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.038,ns_1@10.242.238.88:<0.15432.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 388 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.038,ns_1@10.242.238.88:<0.15433.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 388 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.043,ns_1@10.242.238.88:<0.15434.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 388 into 'ns_1@10.242.238.90' is <18125.25850.0> [ns_server:debug,2014-08-19T16:50:56.046,ns_1@10.242.238.88:<0.15434.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 388 into 'ns_1@10.242.238.89' is <18124.30235.0> [rebalance:debug,2014-08-19T16:50:56.046,ns_1@10.242.238.88:<0.15426.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 388 is <0.15434.1> [views:debug,2014-08-19T16:50:56.065,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/179. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.065,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",179,active,0} [ns_server:debug,2014-08-19T16:50:56.072,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,63496}, tap_estimate, {replica_building,"default",388,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25850.0>, <<"replication_building_388_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.085,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,76022}, tap_estimate, {replica_building,"default",388,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30235.0>, <<"replication_building_388_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.085,ns_1@10.242.238.88:<0.15435.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30235.0>}, {'ns_1@10.242.238.90',<18125.25850.0>}]) [rebalance:info,2014-08-19T16:50:56.085,ns_1@10.242.238.88:<0.15426.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:56.086,ns_1@10.242.238.88:<0.15426.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 388 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.086,ns_1@10.242.238.88:<0.15426.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.087,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:56.090,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:50:56.090,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.15447.1>) [ns_server:debug,2014-08-19T16:50:56.090,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 897) [ns_server:debug,2014-08-19T16:50:56.090,ns_1@10.242.238.88:<0.15448.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.091,ns_1@10.242.238.88:<0.15448.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:56.091,ns_1@10.242.238.88:<0.15447.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 897 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.091,ns_1@10.242.238.88:<0.15453.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 897 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.091,ns_1@10.242.238.88:<0.15454.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 897 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.095,ns_1@10.242.238.88:<0.15455.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 897 into 'ns_1@10.242.238.89' is <18124.30241.0> [ns_server:debug,2014-08-19T16:50:56.097,ns_1@10.242.238.88:<0.15455.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 897 into 'ns_1@10.242.238.91' is <18126.27621.0> [rebalance:debug,2014-08-19T16:50:56.097,ns_1@10.242.238.88:<0.15447.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 897 is <0.15455.1> [ns_server:debug,2014-08-19T16:50:56.124,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,115311}, tap_estimate, {replica_building,"default",897,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30241.0>, <<"replication_building_897_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.135,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,126586}, tap_estimate, {replica_building,"default",897,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27621.0>, <<"replication_building_897_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:56.136,ns_1@10.242.238.88:<0.15456.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27621.0>}, {'ns_1@10.242.238.89',<18124.30241.0>}]) [rebalance:info,2014-08-19T16:50:56.136,ns_1@10.242.238.88:<0.15447.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:56.136,ns_1@10.242.238.88:<0.15447.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 897 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.137,ns_1@10.242.238.88:<0.15447.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:50:56.137,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:56.140,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:56.141,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15468.1>) [ns_server:debug,2014-08-19T16:50:56.141,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 643) [ns_server:debug,2014-08-19T16:50:56.141,ns_1@10.242.238.88:<0.15469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.141,ns_1@10.242.238.88:<0.15469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:56.142,ns_1@10.242.238.88:<0.15468.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 643 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.142,ns_1@10.242.238.88:<0.15474.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 643 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.142,ns_1@10.242.238.88:<0.15475.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 643 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.146,ns_1@10.242.238.88:<0.15480.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 643 into 'ns_1@10.242.238.89' is <18124.30246.0> [ns_server:debug,2014-08-19T16:50:56.149,ns_1@10.242.238.88:<0.15480.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 643 into 'ns_1@10.242.238.90' is <18125.25869.0> [rebalance:debug,2014-08-19T16:50:56.149,ns_1@10.242.238.88:<0.15468.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 643 is <0.15480.1> [ns_server:debug,2014-08-19T16:50:56.176,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,167517}, tap_estimate, {replica_building,"default",643,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30246.0>, <<"replication_building_643_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.193,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,184913}, tap_estimate, {replica_building,"default",643,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25869.0>, <<"replication_building_643_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.194,ns_1@10.242.238.88:<0.15485.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25869.0>}, {'ns_1@10.242.238.89',<18124.30246.0>}]) [rebalance:info,2014-08-19T16:50:56.194,ns_1@10.242.238.88:<0.15468.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:56.195,ns_1@10.242.238.88:<0.15468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 643 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.195,ns_1@10.242.238.88:<0.15468.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.196,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:56.199,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:56.199,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15503.1>) [ns_server:debug,2014-08-19T16:50:56.199,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 387) [ns_server:debug,2014-08-19T16:50:56.199,ns_1@10.242.238.88:<0.15504.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.199,ns_1@10.242.238.88:<0.15504.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:56.232,ns_1@10.242.238.88:<0.15503.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 387 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [ns_server:debug,2014-08-19T16:50:56.232,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 177. Nacking mccouch update. [rebalance:info,2014-08-19T16:50:56.232,ns_1@10.242.238.88:<0.15509.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 387 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.232,ns_1@10.242.238.88:<0.15510.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 387 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [views:debug,2014-08-19T16:50:56.232,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/177. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.233,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",177,active,0} [ns_server:debug,2014-08-19T16:50:56.235,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591,280, 225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407,952, 641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512,457, 146,691,380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873,562, 507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144,872,689,561, 506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,897, 769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946, 818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243, 971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451, 323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842, 659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164, 892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500, 372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580, 397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788, 733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018, 941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812,757, 629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462, 696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567, 
256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005, 928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799, 744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304, 249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120, 976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536, 481,170,770,715,404,949,638,327] [ns_server:debug,2014-08-19T16:50:56.236,ns_1@10.242.238.88:<0.15511.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 387 into 'ns_1@10.242.238.90' is <18125.25875.0> [ns_server:debug,2014-08-19T16:50:56.238,ns_1@10.242.238.88:<0.15511.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 387 into 'ns_1@10.242.238.89' is <18124.30265.0> [rebalance:debug,2014-08-19T16:50:56.238,ns_1@10.242.238.88:<0.15503.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 387 is <0.15511.1> [ns_server:debug,2014-08-19T16:50:56.265,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,256893}, tap_estimate, {replica_building,"default",387,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25875.0>, <<"replication_building_387_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.278,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,268981}, tap_estimate, {replica_building,"default",387,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30265.0>, <<"replication_building_387_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.278,ns_1@10.242.238.88:<0.15512.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30265.0>}, {'ns_1@10.242.238.90',<18125.25875.0>}]) [rebalance:info,2014-08-19T16:50:56.278,ns_1@10.242.238.88:<0.15503.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:56.279,ns_1@10.242.238.88:<0.15503.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 387 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.279,ns_1@10.242.238.88:<0.15503.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.280,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:56.283,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:56.283,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.15524.1>) [ns_server:debug,2014-08-19T16:50:56.283,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 896) [ns_server:debug,2014-08-19T16:50:56.283,ns_1@10.242.238.88:<0.15525.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.283,ns_1@10.242.238.88:<0.15525.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:50:56.283,ns_1@10.242.238.88:<0.15524.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 896 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.284,ns_1@10.242.238.88:<0.15530.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 896 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.284,ns_1@10.242.238.88:<0.15531.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 896 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.287,ns_1@10.242.238.88:<0.15532.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 896 into 'ns_1@10.242.238.89' is <18124.30271.0> [ns_server:debug,2014-08-19T16:50:56.290,ns_1@10.242.238.88:<0.15532.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 896 into 'ns_1@10.242.238.91' is <18126.27655.0> [rebalance:debug,2014-08-19T16:50:56.290,ns_1@10.242.238.88:<0.15524.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 896 is <0.15532.1> [views:debug,2014-08-19T16:50:56.308,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/177. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.308,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",177,active,0} [ns_server:debug,2014-08-19T16:50:56.315,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,306483}, tap_estimate, {replica_building,"default",896,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30271.0>, <<"replication_building_896_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.330,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,321205}, tap_estimate, {replica_building,"default",896,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27655.0>, <<"replication_building_896_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:50:56.330,ns_1@10.242.238.88:<0.15533.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27655.0>}, {'ns_1@10.242.238.89',<18124.30271.0>}]) [rebalance:info,2014-08-19T16:50:56.331,ns_1@10.242.238.88:<0.15524.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:50:56.331,ns_1@10.242.238.88:<0.15524.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 896 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.332,ns_1@10.242.238.88:<0.15524.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.332,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:50:56.335,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:56.335,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15545.1>) [ns_server:debug,2014-08-19T16:50:56.335,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 642) [ns_server:debug,2014-08-19T16:50:56.336,ns_1@10.242.238.88:<0.15546.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.336,ns_1@10.242.238.88:<0.15546.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:56.336,ns_1@10.242.238.88:<0.15545.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 642 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.336,ns_1@10.242.238.88:<0.15551.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 642 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.336,ns_1@10.242.238.88:<0.15552.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 642 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.339,ns_1@10.242.238.88:<0.15553.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 642 into 'ns_1@10.242.238.89' is <18124.30276.0> [ns_server:debug,2014-08-19T16:50:56.342,ns_1@10.242.238.88:<0.15553.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 642 into 'ns_1@10.242.238.90' is <18125.25894.0> [rebalance:debug,2014-08-19T16:50:56.342,ns_1@10.242.238.88:<0.15545.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 642 is <0.15553.1> [ns_server:debug,2014-08-19T16:50:56.369,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,360845}, tap_estimate, {replica_building,"default",642,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30276.0>, <<"replication_building_642_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.381,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,372662}, tap_estimate, {replica_building,"default",642,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25894.0>, <<"replication_building_642_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.382,ns_1@10.242.238.88:<0.15554.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25894.0>}, {'ns_1@10.242.238.89',<18124.30276.0>}]) [rebalance:info,2014-08-19T16:50:56.382,ns_1@10.242.238.88:<0.15545.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:56.383,ns_1@10.242.238.88:<0.15545.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 642 on 
ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.383,ns_1@10.242.238.88:<0.15545.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.384,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:56.386,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:56.386,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15583.1>) [ns_server:debug,2014-08-19T16:50:56.387,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 386) [ns_server:debug,2014-08-19T16:50:56.387,ns_1@10.242.238.88:<0.15584.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.387,ns_1@10.242.238.88:<0.15584.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:56.387,ns_1@10.242.238.88:<0.15583.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 386 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.387,ns_1@10.242.238.88:<0.15591.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 386 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.387,ns_1@10.242.238.88:<0.15592.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 386 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.392,ns_1@10.242.238.88:<0.15593.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 386 into 'ns_1@10.242.238.90' is <18125.25900.0> [ns_server:debug,2014-08-19T16:50:56.394,ns_1@10.242.238.88:<0.15593.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 386 into 'ns_1@10.242.238.89' is <18124.30281.0> [rebalance:debug,2014-08-19T16:50:56.394,ns_1@10.242.238.88:<0.15583.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 386 is <0.15593.1> [ns_server:debug,2014-08-19T16:50:56.421,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,412194}, tap_estimate, {replica_building,"default",386,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25900.0>, <<"replication_building_386_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.435,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,426842}, tap_estimate, {replica_building,"default",386,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30281.0>, <<"replication_building_386_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.436,ns_1@10.242.238.88:<0.15600.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30281.0>}, 
{'ns_1@10.242.238.90',<18125.25900.0>}]) [rebalance:info,2014-08-19T16:50:56.436,ns_1@10.242.238.88:<0.15583.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:56.437,ns_1@10.242.238.88:<0.15583.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 386 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.437,ns_1@10.242.238.88:<0.15583.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.438,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:56.441,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:56.441,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15615.1>) [ns_server:debug,2014-08-19T16:50:56.441,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 641) [ns_server:debug,2014-08-19T16:50:56.441,ns_1@10.242.238.88:<0.15616.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.442,ns_1@10.242.238.88:<0.15616.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:56.442,ns_1@10.242.238.88:<0.15615.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 641 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.442,ns_1@10.242.238.88:<0.15621.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 641 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.442,ns_1@10.242.238.88:<0.15622.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 641 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.446,ns_1@10.242.238.88:<0.15623.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 641 into 'ns_1@10.242.238.89' is <18124.30301.0> [ns_server:debug,2014-08-19T16:50:56.447,ns_1@10.242.238.88:<0.15623.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 641 into 'ns_1@10.242.238.90' is <18125.25919.0> [rebalance:debug,2014-08-19T16:50:56.447,ns_1@10.242.238.88:<0.15615.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 641 is <0.15623.1> [ns_server:debug,2014-08-19T16:50:56.475,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 175. Nacking mccouch update. 
[ns_server:debug,2014-08-19T16:50:56.475,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,466057}, tap_estimate, {replica_building,"default",641,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30301.0>, <<"replication_building_641_'ns_1@10.242.238.89'">>} [views:debug,2014-08-19T16:50:56.475,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/175. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.475,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",175,active,0} [ns_server:debug,2014-08-19T16:50:56.477,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,773,718,407, 952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823,512, 457,146,691,380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328,873, 562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144,872,689, 561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352, 897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377, 194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585, 402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812, 757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965, 837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 
752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878, 567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383, 1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199, 799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615, 304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431, 120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847, 536,481,170,770,715,404,949,638,327] [ns_server:debug,2014-08-19T16:50:56.490,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,481040}, tap_estimate, {replica_building,"default",641,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25919.0>, <<"replication_building_641_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.490,ns_1@10.242.238.88:<0.15624.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25919.0>}, {'ns_1@10.242.238.89',<18124.30301.0>}]) [rebalance:info,2014-08-19T16:50:56.490,ns_1@10.242.238.88:<0.15615.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:56.491,ns_1@10.242.238.88:<0.15615.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 641 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.491,ns_1@10.242.238.88:<0.15615.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.492,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:56.494,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:56.494,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15636.1>) [ns_server:debug,2014-08-19T16:50:56.494,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 385) [ns_server:debug,2014-08-19T16:50:56.495,ns_1@10.242.238.88:<0.15637.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.495,ns_1@10.242.238.88:<0.15637.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:56.495,ns_1@10.242.238.88:<0.15636.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 385 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.495,ns_1@10.242.238.88:<0.15642.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 385 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.495,ns_1@10.242.238.88:<0.15643.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 385 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.499,ns_1@10.242.238.88:<0.15644.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 385 into 'ns_1@10.242.238.90' is <18125.25925.0> [ns_server:debug,2014-08-19T16:50:56.501,ns_1@10.242.238.88:<0.15644.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 385 into 'ns_1@10.242.238.89' is <18124.30306.0> [rebalance:debug,2014-08-19T16:50:56.501,ns_1@10.242.238.88:<0.15636.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 385 is <0.15644.1> [ns_server:debug,2014-08-19T16:50:56.528,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,519080}, tap_estimate, {replica_building,"default",385,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25925.0>, <<"replication_building_385_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.542,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,533750}, tap_estimate, {replica_building,"default",385,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30306.0>, <<"replication_building_385_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.543,ns_1@10.242.238.88:<0.15645.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30306.0>}, {'ns_1@10.242.238.90',<18125.25925.0>}]) [rebalance:info,2014-08-19T16:50:56.543,ns_1@10.242.238.88:<0.15636.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:56.544,ns_1@10.242.238.88:<0.15636.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 385 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.544,ns_1@10.242.238.88:<0.15636.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.545,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:56.547,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:50:56.547,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.15657.1>) 
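The records above repeat one pattern per vbucket move: spawn the single-vbucket mover, push replica state changes to the destination nodes, spawn the replica-building ebucketmigrators, collect their tap estimates, and finally note backfill done before the next action is scheduled. Below is a minimal text-processing sketch for timing that cycle from a saved copy of this log. It assumes only the record prefix format visible here, [component:level,timestamp,node:pid:module:function:line]Message; the helper name move_durations and the report format are my own, not part of ns_server.

#!/usr/bin/env python3
"""Pair 'Noted vbucket move start' with the matching 'noted backfill done'
record and report the elapsed time per vbucket (sketch, not a Couchbase tool)."""
import re
import sys
from datetime import datetime

TS_FMT = "%Y-%m-%dT%H:%M:%S.%f"
# Record prefix: [ns_server:debug,<timestamp>,<node:pid:module:function:line>]
START_RE = re.compile(
    r"\[ns_server:debug,([^,]+),[^\]]*\]"
    r"Noted vbucket move start \(vbucket (\d+)\)")
DONE_RE = re.compile(
    r"\[ns_server:debug,([^,]+),[^\]]*\]"
    r"noted backfill done: \{move,\{(\d+),")

def move_durations(text):
    """Return {vbucket: seconds from 'move start' to 'backfill done'}."""
    starts = {int(m.group(2)): datetime.strptime(m.group(1), TS_FMT)
              for m in START_RE.finditer(text)}
    durations = {}
    for m in DONE_RE.finditer(text):
        vb = int(m.group(2))
        if vb in starts:
            done = datetime.strptime(m.group(1), TS_FMT)
            durations[vb] = (done - starts[vb]).total_seconds()
    return durations

if __name__ == "__main__":
    text = open(sys.argv[1]).read() if len(sys.argv) > 1 else sys.stdin.read()
    for vb, secs in sorted(move_durations(text).items()):
        print(f"vbucket {vb}: backfill noted after {secs:.3f}s")

For the stretch above this would report sub-second gaps, e.g. vbucket 385 has its move start noted at 16:50:56.494 and backfill done noted at 16:50:56.545.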
[ns_server:debug,2014-08-19T16:50:56.547,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 640) [ns_server:debug,2014-08-19T16:50:56.548,ns_1@10.242.238.88:<0.15658.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.548,ns_1@10.242.238.88:<0.15658.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:50:56.548,ns_1@10.242.238.88:<0.15657.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 640 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.548,ns_1@10.242.238.88:<0.15663.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 640 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.548,ns_1@10.242.238.88:<0.15664.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 640 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [views:debug,2014-08-19T16:50:56.550,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/175. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.550,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",175,active,0} [ns_server:debug,2014-08-19T16:50:56.552,ns_1@10.242.238.88:<0.15665.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 640 into 'ns_1@10.242.238.89' is <18124.30312.0> [ns_server:debug,2014-08-19T16:50:56.554,ns_1@10.242.238.88:<0.15665.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 640 into 'ns_1@10.242.238.90' is <18125.25930.0> [rebalance:debug,2014-08-19T16:50:56.554,ns_1@10.242.238.88:<0.15657.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 640 is <0.15665.1> [ns_server:debug,2014-08-19T16:50:56.582,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,573289}, tap_estimate, {replica_building,"default",640,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30312.0>, <<"replication_building_640_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.594,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,585441}, tap_estimate, {replica_building,"default",640,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25930.0>, <<"replication_building_640_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.594,ns_1@10.242.238.88:<0.15666.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.25930.0>}, {'ns_1@10.242.238.89',<18124.30312.0>}]) [rebalance:info,2014-08-19T16:50:56.595,ns_1@10.242.238.88:<0.15657.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:50:56.595,ns_1@10.242.238.88:<0.15657.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 640 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:50:56.596,ns_1@10.242.238.88:<0.15657.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.596,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:50:56.598,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:50:56.598,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.15679.1>) [ns_server:debug,2014-08-19T16:50:56.599,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 384) [ns_server:debug,2014-08-19T16:50:56.599,ns_1@10.242.238.88:<0.15680.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:50:56.599,ns_1@10.242.238.88:<0.15680.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:50:56.599,ns_1@10.242.238.88:<0.15679.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 384 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:50:56.599,ns_1@10.242.238.88:<0.15685.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 384 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:50:56.599,ns_1@10.242.238.88:<0.15686.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 384 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:50:56.603,ns_1@10.242.238.88:<0.15687.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 384 into 'ns_1@10.242.238.90' is <18125.25936.0> [ns_server:debug,2014-08-19T16:50:56.605,ns_1@10.242.238.88:<0.15687.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 384 into 'ns_1@10.242.238.89' is <18124.30331.0> [rebalance:debug,2014-08-19T16:50:56.605,ns_1@10.242.238.88:<0.15679.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 384 is <0.15687.1> [ns_server:debug,2014-08-19T16:50:56.632,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,623584}, tap_estimate, {replica_building,"default",384,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.25936.0>, <<"replication_building_384_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:50:56.645,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452656,636890}, tap_estimate, {replica_building,"default",384,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.30331.0>, <<"replication_building_384_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:50:56.646,ns_1@10.242.238.88:<0.15688.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.30331.0>}, {'ns_1@10.242.238.90',<18125.25936.0>}]) 
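Each move above also produces one 'Seeing tap_estimate' record per destination node, and the integer right after the replica_building tuple is the reported estimate (0 throughout this stretch, i.e. nothing left to backfill). A small sketch for pulling those tuples out of the log text follows; the regex mirrors the pretty-printed Erlang term shown above, and tap_estimates is a name of my own rather than a Couchbase API.

#!/usr/bin/env python3
"""Collect 'Seeing tap_estimate' records: bucket, vbucket, source node,
destination node and the reported estimate (sketch, not a Couchbase tool)."""
import re
import sys
from collections import defaultdict

TAP_RE = re.compile(
    r"Seeing tap_estimate:\s*\{\{[^}]*\},\s*tap_estimate,\s*"
    r"\{replica_building,\"([^\"]+)\",(\d+),'([^']+)',\s*'([^']+)'\},\s*(\d+),")

def tap_estimates(text):
    """Return {(bucket, vbucket): [(source, destination, estimate), ...]}."""
    out = defaultdict(list)
    for bucket, vb, src, dst, est in TAP_RE.findall(text):
        out[(bucket, int(vb))].append((src, dst, int(est)))
    return out

if __name__ == "__main__":
    text = open(sys.argv[1]).read() if len(sys.argv) > 1 else sys.stdin.read()
    for (bucket, vb), rows in sorted(tap_estimates(text).items()):
        for src, dst, est in rows:
            print(f"{bucket}/vb {vb}: {src} -> {dst} estimate={est}")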
[rebalance:info,2014-08-19T16:50:56.646,ns_1@10.242.238.88:<0.15679.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:50:56.647,ns_1@10.242.238.88:<0.15679.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 384 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:50:56.647,ns_1@10.242.238.88:<0.15679.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:50:56.648,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:50:56.649,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:50:56.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 173. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.649,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/173. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",173,active,0} [ns_server:debug,2014-08-19T16:50:56.651,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,771,716,405,950,639,328, 873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144,872, 689,561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535,480, 352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505, 377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713, 585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921, 793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218, 1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762, 634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114, 970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475, 347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891, 708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188, 916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268, 213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940, 
812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237, 965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445, 317,134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440, 312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828, 517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694, 383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510, 199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926, 615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742, 431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247, 847,536,481,170,770,715,404,949,638,327] [views:debug,2014-08-19T16:50:56.683,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/173. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.683,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",173,active,0} [ns_server:debug,2014-08-19T16:50:56.758,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 171. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.758,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/171. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.758,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",171,active,0} [ns_server:debug,2014-08-19T16:50:56.760,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 872,689,561,506,378,195,1000,923,795,740,612,429,301,246,118,974,846,663,535, 480,352,897,769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560, 505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768, 713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193, 921,793,738,610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273, 218,1023,946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609, 426,298,243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242, 114,970,842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450, 322,995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530, 475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866, 683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346, 891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371, 188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341, 158,886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574, 391,263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782, 727,599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623, 440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644, 
333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003, 926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797, 742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302, 247,847,536,481,170,770,715,404,949,638,327] [views:debug,2014-08-19T16:50:56.792,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/171. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.793,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",171,active,0} [ns_server:debug,2014-08-19T16:50:56.867,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 169. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.867,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/169. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.867,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",169,active,0} [ns_server:debug,2014-08-19T16:50:56.869,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,169,897, 769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,895,712,584,401,273,218,1023,946, 818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243, 971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451, 323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842, 659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867, 684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164, 892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500, 372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708,580, 397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788, 733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018, 941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812,757, 629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 
654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462, 696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878,567, 256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383,1005, 928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199,799, 744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615,304, 249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431,120, 976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847,536, 481,170,770,715,404,949,638,327,872,561,506,195] [views:debug,2014-08-19T16:50:56.901,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/169. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.902,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",169,active,0} [ns_server:debug,2014-08-19T16:50:56.985,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 167. Nacking mccouch update. [views:debug,2014-08-19T16:50:56.985,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/167. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:56.985,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",167,active,0} [ns_server:debug,2014-08-19T16:50:56.987,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,169,897, 769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,893,710,582,399,271,216,1021,944,816,761,633,450,322,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347, 164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555, 500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891,708, 580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940,812, 757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965, 837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317, 134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653, 525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878, 
567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383, 1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199, 799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615, 304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431, 120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847, 536,481,170,770,715,404,949,638,327,872,561,506,195] [views:debug,2014-08-19T16:50:57.020,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/167. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.020,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",167,active,0} [ns_server:debug,2014-08-19T16:50:57.167,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 165. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.167,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/165. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.167,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",165,active,0} [ns_server:debug,2014-08-19T16:50:57.169,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,169,897, 769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475, 347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,891, 708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188, 916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268, 213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017,940, 812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237, 
965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445, 317,134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836, 653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440, 312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828, 517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694, 383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510, 199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926, 615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742, 431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247, 847,536,481,170,770,715,404,949,638,327,872,561,506,195] [views:debug,2014-08-19T16:50:57.251,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/165. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.251,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",165,active,0} [ns_server:debug,2014-08-19T16:50:57.434,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 163. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.434,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/163. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.435,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",163,active,0} [ns_server:debug,2014-08-19T16:50:57.437,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,169,897, 769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475, 347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371, 188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,889,706,578,395,267,212,1017, 940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964, 836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316,989, 861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341, 158,886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549, 494,366,183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574, 391,263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782, 727,599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623, 440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644, 
333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003, 926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797, 742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302, 247,847,536,481,170,770,715,404,949,638,327,872,561,506,195] [views:debug,2014-08-19T16:50:57.518,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/163. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.518,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",163,active,0} [ns_server:debug,2014-08-19T16:50:57.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 161. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.703,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/161. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",161,active,0} [ns_server:debug,2014-08-19T16:50:57.705,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,795,740,612,429,301,246,118,974,846,663,535,480,352,169,897, 769,714,586,403,275,220,948,820,765,637,454,326,999,871,688,560,505,377,194, 922,794,739,611,428,300,245,973,845,662,534,479,351,168,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,997,869,686,558,503,375,192,920,792,737,609,426,298, 243,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 995,867,684,556,501,373,190,918,790,735,607,424,296,241,969,841,658,530,475, 347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371, 188,916,788,733,605,422,294,239,967,839,656,528,473,345,162,890,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370,187,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267,212, 1017,940,812,757,629,446,318,991,863,680,552,497,369,186,914,786,731,603,420, 
292,237,965,837,654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756, 628,445,317,134,990,862,679,551,496,368,185,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,887,704,576,393,265,210,1015,938,810,755,627,444,316, 989,861,678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469, 341,158,886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677, 549,494,366,183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702, 574,391,263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910, 782,727,599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207, 1012,935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598, 415,287,232,960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751, 623,440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831, 648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955, 644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515, 460,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876, 565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381, 1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197, 797,742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613, 302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195] [views:debug,2014-08-19T16:50:57.787,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/161. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.788,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",161,active,0} [ns_server:debug,2014-08-19T16:50:57.950,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_384_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_384_'ns_1@10.242.238.90'">>}]}, {move_state,640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_640_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_640_'ns_1@10.242.238.89'">>}]}, {move_state,385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_385_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_385_'ns_1@10.242.238.90'">>}]}, {move_state,641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_641_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_641_'ns_1@10.242.238.89'">>}]}, {move_state,386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_386_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_386_'ns_1@10.242.238.90'">>}]}, {move_state,642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_642_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_642_'ns_1@10.242.238.89'">>}]}, {move_state,896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_896_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_896_'ns_1@10.242.238.89'">>}]}, {move_state,387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_387_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_387_'ns_1@10.242.238.90'">>}]}, {move_state,643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_643_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_643_'ns_1@10.242.238.89'">>}]}, {move_state,897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_897_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_897_'ns_1@10.242.238.89'">>}]}, {move_state,388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_388_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_388_'ns_1@10.242.238.90'">>}]}, {move_state,644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_644_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_644_'ns_1@10.242.238.89'">>}]}, {move_state,898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_898_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_898_'ns_1@10.242.238.89'">>}]}, {move_state,389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_389_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_389_'ns_1@10.242.238.90'">>}]}, {move_state,645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_645_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_645_'ns_1@10.242.238.89'">>}]}, {move_state,899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_899_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_899_'ns_1@10.242.238.89'">>}]}, {move_state,390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_390_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_390_'ns_1@10.242.238.90'">>}]}, {move_state,646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_646_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_646_'ns_1@10.242.238.89'">>}]}, {move_state,900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_900_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_900_'ns_1@10.242.238.89'">>}]}, {move_state,391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_391_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_391_'ns_1@10.242.238.90'">>}]}, {move_state,647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_647_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_647_'ns_1@10.242.238.89'">>}]}, {move_state,901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_901_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_901_'ns_1@10.242.238.89'">>}]}, {move_state,392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_392_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_392_'ns_1@10.242.238.90'">>}]}, {move_state,648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_648_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_648_'ns_1@10.242.238.89'">>}]}, {move_state,902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_902_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_902_'ns_1@10.242.238.89'">>}]}, {move_state,393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_393_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_393_'ns_1@10.242.238.90'">>}]}, {move_state,649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_649_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_649_'ns_1@10.242.238.89'">>}]}, {move_state,903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_903_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_903_'ns_1@10.242.238.89'">>}]}, {move_state,394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_394_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_394_'ns_1@10.242.238.90'">>}]}, {move_state,650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_650_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_650_'ns_1@10.242.238.89'">>}]}, {move_state,904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_904_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_904_'ns_1@10.242.238.89'">>}]}, {move_state,395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_395_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_395_'ns_1@10.242.238.90'">>}]}, {move_state,651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_651_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_651_'ns_1@10.242.238.89'">>}]}, {move_state,905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_905_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_905_'ns_1@10.242.238.89'">>}]}, {move_state,396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_396_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_396_'ns_1@10.242.238.90'">>}]}, {move_state,652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_652_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_652_'ns_1@10.242.238.89'">>}]}, {move_state,906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_906_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_906_'ns_1@10.242.238.89'">>}]}, {move_state,397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_397_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_397_'ns_1@10.242.238.90'">>}]}, {move_state,653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_653_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_653_'ns_1@10.242.238.89'">>}]}, {move_state,907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_907_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_907_'ns_1@10.242.238.89'">>}]}, {move_state,398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_398_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_398_'ns_1@10.242.238.90'">>}]}, {move_state,654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_654_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_654_'ns_1@10.242.238.89'">>}]}, {move_state,908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_908_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_908_'ns_1@10.242.238.89'">>}]}, {move_state,399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_399_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_399_'ns_1@10.242.238.90'">>}]}, {move_state,655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_655_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_655_'ns_1@10.242.238.89'">>}]}, {move_state,909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_909_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_909_'ns_1@10.242.238.89'">>}]}, {move_state,400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_400_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_400_'ns_1@10.242.238.90'">>}]}, {move_state,656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_656_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_656_'ns_1@10.242.238.89'">>}]}, {move_state,910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_910_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_910_'ns_1@10.242.238.89'">>}]}, {move_state,401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_401_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_401_'ns_1@10.242.238.90'">>}]}, {move_state,657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_657_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_657_'ns_1@10.242.238.89'">>}]}, {move_state,911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_911_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_911_'ns_1@10.242.238.89'">>}]}, {move_state,402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_402_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_402_'ns_1@10.242.238.90'">>}]}, {move_state,658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_658_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_658_'ns_1@10.242.238.89'">>}]}, {move_state,912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_912_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_912_'ns_1@10.242.238.89'">>}]}, {move_state,403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_403_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_403_'ns_1@10.242.238.90'">>}]}, {move_state,659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_659_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_659_'ns_1@10.242.238.89'">>}]}, {move_state,913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_913_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_913_'ns_1@10.242.238.89'">>}]}, {move_state,404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_404_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_404_'ns_1@10.242.238.90'">>}]}, {move_state,660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_660_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_660_'ns_1@10.242.238.89'">>}]}, {move_state,914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_914_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_914_'ns_1@10.242.238.89'">>}]}, {move_state,405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_405_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_405_'ns_1@10.242.238.90'">>}]}, {move_state,661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_661_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_661_'ns_1@10.242.238.89'">>}]}, {move_state,915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_915_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_915_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:50:57.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 384, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 640, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 385, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 641, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 386, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 642, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:57.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 896, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 387, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 643, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 897, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 388, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.962,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 159. Nacking mccouch update. [views:debug,2014-08-19T16:50:57.962,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/159. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:57.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 644, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.962,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",159,active,0} [ns_server:debug,2014-08-19T16:50:57.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 898, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 389, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 645, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 899, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.964,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,974,846,663,535,480,352,169,897,769,714,586,403, 275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611, 428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819, 
764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660, 532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996, 868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556, 501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189, 917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397, 269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757, 629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440,312, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878, 567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694,383, 1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510,199, 799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926,615, 304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742,431, 120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247,847, 536,481,170,770,715,404,949,638,327,872,561,506,195,795,740,429,118] [ns_server:debug,2014-08-19T16:50:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 390, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 646, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 900, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 391, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:50:57.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 647, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 901, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 392, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 648, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 902, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 393, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 649, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 903, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 394, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 650, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 904, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 395, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 651, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 905, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 396, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 652, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 906, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 397, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 653, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:57.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 907, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 398, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 654, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 908, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 399, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 655, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 909, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 400, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 656, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 910, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 401, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 657, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 911, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 402, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 658, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 912, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 403, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 659, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 913, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:50:57.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 404, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 660, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 914, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 405, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:50:57.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 661, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:50:57.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 915, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [views:debug,2014-08-19T16:50:58.046,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/159. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.046,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",159,active,0} [ns_server:debug,2014-08-19T16:50:58.221,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 157. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.222,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/157. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.222,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",157,active,0} [ns_server:debug,2014-08-19T16:50:58.224,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,974,846,663,535,480,352,169,897,769,714,586,403, 275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611, 428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819, 764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660, 532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996, 868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556, 501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189, 917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397, 269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757, 629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,883,700,572,389,261,206,1011,934,806,751,623,440, 312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828, 517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 
878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,694, 383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510, 199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003,926, 615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797,742, 431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302,247, 847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740,429,118] [views:debug,2014-08-19T16:50:58.287,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/157. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.288,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",157,active,0} [ns_server:debug,2014-08-19T16:50:58.362,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 155. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.362,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/155. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.362,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",155,active,0} [ns_server:debug,2014-08-19T16:50:58.364,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,974,846,663,535,480,352,169,897,769,714,586,403, 275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611, 428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819, 764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660, 532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996, 868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556, 501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189, 917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397, 269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757, 
629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644, 333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003, 926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797, 742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302, 247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740,429,118] [views:debug,2014-08-19T16:50:58.396,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/155. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.397,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",155,active,0} [ns_server:debug,2014-08-19T16:50:58.472,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 153. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.472,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/153. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.472,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",153,active,0} [ns_server:debug,2014-08-19T16:50:58.474,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,974,846,663,535,480,352,169,897,769,714,586,403, 275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611, 428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819, 764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660, 532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996, 868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556, 501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189, 917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397, 269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757, 629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955, 
644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515, 460,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876, 565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381, 1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197, 797,742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613, 302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740,429, 118] [views:debug,2014-08-19T16:50:58.505,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/153. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.506,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",153,active,0} [ns_server:debug,2014-08-19T16:50:58.581,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 151. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.581,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/151. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.581,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",151,active,0} [ns_server:debug,2014-08-19T16:50:58.583,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,974,846,663,535,480,352,169,897,769,714,586,403, 275,220,948,820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611, 428,300,245,973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819, 764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660, 532,477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996, 868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556, 501,373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189, 917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397, 269,214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757, 
629,446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391, 263,208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410, 955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826, 515,460,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331, 876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692, 381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508, 197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924, 613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740, 429,118] [views:debug,2014-08-19T16:50:58.615,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/151. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.615,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",151,active,0} [ns_server:debug,2014-08-19T16:50:58.690,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 149. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.690,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/149. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.690,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",149,active,0} [ns_server:debug,2014-08-19T16:50:58.692,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453, 325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844, 661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349, 166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 
878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,149, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,692,381,1003, 926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797, 742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613,302, 247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740,429,118, 974,663,352] [views:debug,2014-08-19T16:50:58.724,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/149. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.724,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",149,active,0} [ns_server:debug,2014-08-19T16:50:58.888,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 147. Nacking mccouch update. [views:debug,2014-08-19T16:50:58.888,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/147. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.889,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",147,active,0} [ns_server:debug,2014-08-19T16:50:58.890,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453, 325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844, 661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349, 166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 
991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,149, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692,381, 1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197, 797,742,431,120,976,665,354,899,588,277,222,822,767,456,690,379,1001,924,613, 302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740,429, 118,974,663,352] [views:debug,2014-08-19T16:50:58.964,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/147. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:58.964,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",147,active,0} [ns_server:debug,2014-08-19T16:50:59.139,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 145. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.139,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/145. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.140,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",145,active,0} [ns_server:debug,2014-08-19T16:50:59.141,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,897,769,714,586,403,275,220,948, 820,765,637,454,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245, 973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453, 325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844, 661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349, 166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373,190, 918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 
878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,149, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692,381, 1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197, 797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379,1001,924, 613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740, 429,118,974,663,352] [ns_server:info,2014-08-19T16:50:59.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [views:debug,2014-08-19T16:50:59.198,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/145. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.198,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",145,active,0} [ns_server:debug,2014-08-19T16:50:59.357,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 143. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.357,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/143. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.357,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",143,active,0} [ns_server:debug,2014-08-19T16:50:59.359,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,897,769,714,586,403,275,220,948, 820,765,637,454,326,143,999,871,688,560,505,377,194,922,794,739,611,428,300, 245,973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636, 453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116,972, 844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324, 997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477, 349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685, 557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165, 893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501,373, 190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398, 270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789, 734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214, 1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758, 
630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110, 966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446, 318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526, 471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862, 679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911, 783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828, 517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644, 333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460, 149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876, 565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692, 381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508, 197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379,1001, 924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795, 740,429,118,974,663,352] [views:debug,2014-08-19T16:50:59.424,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/143. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",143,active,0} [ns_server:debug,2014-08-19T16:50:59.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 141. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.566,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/141. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",141,active,0} [ns_server:debug,2014-08-19T16:50:59.568,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,897,769,714,586,403,275,220,948, 820,765,637,454,326,143,999,871,688,560,505,377,194,922,794,739,611,428,300, 245,973,845,662,534,479,351,168,896,768,713,585,402,274,219,947,819,764,636, 453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116,972, 844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324, 141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,971,843,660,532, 477,349,166,894,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868, 685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348, 165,893,710,582,399,271,216,1021,944,816,761,633,450,322,995,867,684,556,501, 373,190,918,790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581, 398,270,215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917, 789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269, 214,1019,942,814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605, 422,294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813, 758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238, 110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629, 446,318,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654, 526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990, 862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470, 342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955, 
644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515, 460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331, 876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147, 692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563, 508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379, 1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195, 795,740,429,118,974,663,352] [views:debug,2014-08-19T16:50:59.633,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/141. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.634,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",141,active,0} [ns_server:debug,2014-08-19T16:50:59.782,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 139. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.782,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/139. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.782,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",139,active,0} [ns_server:debug,2014-08-19T16:50:59.784,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,820,765,637,454, 326,143,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662, 534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998, 870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478, 350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686, 558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,993,865,682,554,499,371,188,916,788,733,605,422,294,239, 967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447, 319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838, 655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,991, 
863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471,343, 160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551, 496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887, 704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495,367,184, 912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728, 600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832,649, 521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337,154, 882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230,830, 519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957,646, 335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517,462, 151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333,878, 567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,149,694, 383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565,510, 199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692,381,1003, 926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197,797, 742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379,1001,924,613, 302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740,429, 118,974,663,352,897,586,275,220] [views:debug,2014-08-19T16:50:59.816,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/139. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.816,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",139,active,0} [ns_server:debug,2014-08-19T16:50:59.891,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 137. Nacking mccouch update. [views:debug,2014-08-19T16:50:59.891,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/137. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.892,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",137,active,0} [ns_server:debug,2014-08-19T16:50:59.893,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,820,765,637,454, 326,143,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662, 534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998, 870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478, 350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686, 558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 
878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,149, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692,381, 1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197, 797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379,1001,924, 613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740, 429,118,974,663,352,897,586,275,220] [views:debug,2014-08-19T16:50:59.925,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/137. Updated state: active (0) [ns_server:debug,2014-08-19T16:50:59.925,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",137,active,0} [ns_server:debug,2014-08-19T16:51:00.000,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 135. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.000,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/135. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.001,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",135,active,0} [ns_server:debug,2014-08-19T16:51:00.002,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,820,765,637,454, 326,143,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662, 534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998, 870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478, 350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686, 558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 
135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526, 471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862, 679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 159,887,704,576,393,265,210,1015,938,810,755,627,444,316,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911, 783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208, 1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828, 517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644, 333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460, 149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876, 565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692, 381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508, 197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379,1001, 924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795, 740,429,118,974,663,352,897,586,275,220] [views:debug,2014-08-19T16:51:00.034,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/135. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.035,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",135,active,0} [ns_server:debug,2014-08-19T16:51:00.109,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 133. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.109,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/133. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",133,active,0} [ns_server:debug,2014-08-19T16:51:00.111,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,820,765,637,454, 326,143,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662, 534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998, 870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478, 350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686, 558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526, 471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862, 679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 159,887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955, 
644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515, 460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331, 876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147, 692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563, 508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379, 1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195, 795,740,429,118,974,663,352,897,586,275,220] [views:debug,2014-08-19T16:51:00.143,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/133. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.143,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",133,active,0} [ns_server:debug,2014-08-19T16:51:00.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 131. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.218,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/131. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.219,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",131,active,0} [ns_server:debug,2014-08-19T16:51:00.221,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,820,765,637,454, 326,143,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662, 534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998, 870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478, 350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686, 558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 
135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526, 471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862, 679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 159,887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410, 955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826, 515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642, 331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458, 147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874, 563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690, 379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506, 195,795,740,429,118,974,663,352,897,586,275,220] [views:debug,2014-08-19T16:51:00.252,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/131. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.253,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",131,active,0} [ns_server:debug,2014-08-19T16:51:00.392,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 129. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.392,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/129. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.392,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",129,active,0} [ns_server:debug,2014-08-19T16:51:00.394,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309,254, 854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981,670, 359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541,486, 175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902,591, 280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773,718, 407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223,823, 512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950,639, 328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455,144, 689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,637,326,999,871, 688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351,168, 896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504, 376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167,895,712, 584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192, 920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760, 632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240,112, 968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448, 320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,967,839,656, 528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992, 864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472, 344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680, 552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471,343,160,888, 705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368, 185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576, 393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912, 784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832,649, 521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,957, 646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828,517, 462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644,333, 
878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460,149, 694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876,565, 510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692,381, 1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508,197, 797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379,1001,924, 613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795,740, 429,118,974,663,352,897,586,275,220,820,765,454,143] [views:debug,2014-08-19T16:51:00.459,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/129. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.459,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",129,active,0} [ns_server:debug,2014-08-19T16:51:00.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 127. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.618,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/127. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",127,active,0} [ns_server:debug,2014-08-19T16:51:00.620,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,981, 670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852,541, 486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357,902, 591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173,773, 718,407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278,223, 823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405,950, 639,328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766,455, 144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,637,326,999, 871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479,351, 168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559, 504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167,895, 712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503,375, 192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919,791, 736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271,216, 1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607, 424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631, 448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,967,839, 656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136, 992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527, 472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863, 
680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471,343,160, 888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496, 368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704, 576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184, 912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728, 600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828, 517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955,644, 333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515,460, 149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331,876, 565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147,692, 381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563,508, 197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379,1001, 924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195,795, 740,429,118,974,663,352,897,586,275,220,820,765,454,143] [views:debug,2014-08-19T16:51:00.685,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/127. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.686,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",127,active,0} [ns_server:debug,2014-08-19T16:51:00.794,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 125. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.794,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/125. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.794,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",125,active,0} [ns_server:debug,2014-08-19T16:51:00.796,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,979,668,357, 902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484,173, 773,718,407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589,278, 223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716,405, 950,639,328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821,766, 455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,637,326, 999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534,479, 351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687, 559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167, 895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503, 375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583, 400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271, 216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735, 607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759, 631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,967, 839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655, 527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991, 863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471,343, 160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551, 496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887, 704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955, 
644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515, 460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331, 876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147, 692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563, 508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379, 1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195, 795,740,429,118,974,663,352,897,586,275,220,820,765,454,143] [views:debug,2014-08-19T16:51:00.827,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/125. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.828,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",125,active,0} [ns_server:debug,2014-08-19T16:51:00.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 123. Nacking mccouch update. [views:debug,2014-08-19T16:51:00.929,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/123. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.930,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",123,active,0} [ns_server:debug,2014-08-19T16:51:00.931,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,977,666,355,900,589, 278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771,716, 405,950,639,328,873,562,507,196,796,741,430,975,664,353,898,587,276,221,821, 766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948,637, 326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662,534, 479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870, 687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191, 919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814, 759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239, 967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447, 319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838, 655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135, 
991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911, 783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208, 1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410, 955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826, 515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642, 331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458, 147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874, 563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690, 379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506, 195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143] [views:debug,2014-08-19T16:51:00.963,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/123. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:00.963,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",123,active,0} [ns_server:debug,2014-08-19T16:51:01.111,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 121. Nacking mccouch update. [views:debug,2014-08-19T16:51:01.111,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/121. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.111,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",121,active,0} [ns_server:debug,2014-08-19T16:51:01.113,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,975,664,353,898,587,276,221, 821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403,948, 637,326,999,871,688,560,505,377,194,922,794,739,611,428,300,245,973,845,662, 534,479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998, 870,687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478, 350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686, 558,503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526, 471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862, 679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 159,887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831, 648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721, 
410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953, 642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513, 458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329, 874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145, 690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561, 506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143] [views:debug,2014-08-19T16:51:01.186,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/121. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.187,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",121,active,0} [ns_server:debug,2014-08-19T16:51:01.338,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 119. Nacking mccouch update. [views:debug,2014-08-19T16:51:01.338,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/119. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.338,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",119,active,0} [ns_server:debug,2014-08-19T16:51:01.340,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,922,794,739,611,428,300,245,973,845,662,534,479, 351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687, 559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167, 895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503, 375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711,583, 400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271, 216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735, 607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295, 240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759, 631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,967, 839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655, 
527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991, 863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471,343, 160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551, 496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887, 704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367, 184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410,955, 644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515, 460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642,331, 876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458,147, 692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874,563, 508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690,379, 1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506,195, 795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999,688,377] [views:debug,2014-08-19T16:51:01.413,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/119. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.414,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",119,active,0} [ns_server:debug,2014-08-19T16:51:01.563,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 117. Nacking mccouch update. [views:debug,2014-08-19T16:51:01.564,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/117. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.564,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",117,active,0} [ns_server:debug,2014-08-19T16:51:01.566,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,922,794,739,611,428,300,245,117,973,845,662,534, 479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870, 687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,971,843,660,532,477,349,166,894,711, 583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191, 919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215,1020, 943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814, 759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239, 967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447, 319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838, 655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135, 991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911, 783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208, 1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410, 
955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826, 515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642, 331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458, 147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874, 563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690, 379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506, 195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999,688,377] [rebalance:info,2014-08-19T16:51:01.644,ns_1@10.242.238.88:<0.14802.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 396 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:01.644,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 396 state to active [rebalance:info,2014-08-19T16:51:01.646,ns_1@10.242.238.88:<0.14802.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 396 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:01.646,ns_1@10.242.238.88:<0.14802.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:01.647,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/117. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.648,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",117,active,0} [ns_server:debug,2014-08-19T16:51:01.831,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 115. Nacking mccouch update. [views:debug,2014-08-19T16:51:01.831,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/115. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.832,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",115,active,0} [ns_server:debug,2014-08-19T16:51:01.833,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,922,794,739,611,428,300,245,117,973,845,662,534, 479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870, 687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,969,841,658,530,475,347,164,892,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630, 447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966, 838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318, 135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654,526, 471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862, 679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342, 159,887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550, 495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831, 648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721, 
410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953, 642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513, 458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329, 874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145, 690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561, 506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999,688, 377] [views:debug,2014-08-19T16:51:01.915,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/115. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:01.915,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",115,active,0} [ns_server:debug,2014-08-19T16:51:02.090,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 113. Nacking mccouch update. [views:debug,2014-08-19T16:51:02.090,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/113. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.091,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",113,active,0} [ns_server:debug,2014-08-19T16:51:02.092,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,922,794,739,611,428,300,245,117,973,845,662,534, 479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870, 687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758, 630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110, 
966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446, 318,135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837,654, 526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990, 862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470, 342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678, 550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886, 703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366, 183,911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391, 263,208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782, 727,599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959, 831,648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776, 721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281, 226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408, 953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640, 329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456, 145,690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872, 561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999, 688,377] [views:debug,2014-08-19T16:51:02.155,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/113. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.156,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",113,active,0} [ns_server:debug,2014-08-19T16:51:02.255,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 111. Nacking mccouch update. [views:debug,2014-08-19T16:51:02.255,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/111. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.255,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",111,active,0} [ns_server:debug,2014-08-19T16:51:02.258,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,922,794,739,611,428,300,245,117,973,845,662,534, 479,351,168,896,768,713,585,402,274,219,947,819,764,636,453,325,142,998,870, 687,559,504,376,193,921,793,738,610,427,299,244,116,972,844,661,533,478,350, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374, 191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582, 399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813, 758,630,447,319,136,992,864,681,553,498,370,187,915,787,732,604,421,293,238, 110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629, 446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,965,837, 654,526,471,343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134, 990,862,679,551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525, 470,342,159,887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861, 678,550,495,367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158, 886,703,575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494, 366,183,911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574, 391,263,208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910, 782,727,599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207, 1012,935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598, 415,287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806, 751,623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231, 959,831,648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984, 673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719, 408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951, 640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767, 456,145,690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327, 872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143, 999,688,377] [views:debug,2014-08-19T16:51:02.290,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/111. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.290,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",111,active,0} [ns_server:debug,2014-08-19T16:51:02.364,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 109. Nacking mccouch update. [views:debug,2014-08-19T16:51:02.364,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/109. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.365,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",109,active,0} [ns_server:debug,2014-08-19T16:51:02.367,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167,895,712,584, 401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631, 448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,111,967, 839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655, 
527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991, 863,680,552,497,369,186,914,786,731,603,420,292,237,109,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,963,835,652,524,469,341,158,886,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911, 783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208, 1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721,410, 955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826, 515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953,642, 331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458, 147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329,874, 563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145,690, 379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561,506, 195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999,688,377, 922,611,300,245] [views:debug,2014-08-19T16:51:02.398,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/109. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.399,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",109,active,0} [ns_server:debug,2014-08-19T16:51:02.473,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 107. Nacking mccouch update. [views:debug,2014-08-19T16:51:02.473,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/107. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",107,active,0} [ns_server:debug,2014-08-19T16:51:02.476,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167,895,712,584, 401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631, 448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,111,967, 839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655, 527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991, 863,680,552,497,369,186,914,786,731,603,420,292,237,109,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959,831, 648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776,721, 
410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953, 642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513, 458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329, 874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145, 690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561, 506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999,688, 377,922,611,300,245] [views:debug,2014-08-19T16:51:02.507,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/107. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.508,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",107,active,0} [ns_server:debug,2014-08-19T16:51:02.583,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 105. Nacking mccouch update. [views:debug,2014-08-19T16:51:02.583,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/105. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.583,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",105,active,0} [ns_server:debug,2014-08-19T16:51:02.585,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167,895,712,584, 401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631, 448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,111,967, 839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655, 
527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991, 863,680,552,497,369,186,914,786,731,603,420,292,237,109,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,959, 831,648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,776, 721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281, 226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408, 953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640, 329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456, 145,690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872, 561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999, 688,377,922,611,300,245] [views:debug,2014-08-19T16:51:02.617,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/105. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.617,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",105,active,0} [ns_server:debug,2014-08-19T16:51:02.774,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 103. Nacking mccouch update. [views:debug,2014-08-19T16:51:02.774,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/103. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.775,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",103,active,0} [ns_server:debug,2014-08-19T16:51:02.776,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167,895,712,584, 401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631, 448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,111,967, 839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655, 527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991, 863,680,552,497,369,186,914,786,731,603,420,292,237,109,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103, 959,831,648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984, 673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,957,646,335,880,569,258,203,803,748,437,126,982,671,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719, 408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951, 640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767, 456,145,690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327, 872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143, 999,688,377,922,611,300,245] [views:debug,2014-08-19T16:51:02.858,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/103. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:02.858,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",103,active,0} [ns_server:debug,2014-08-19T16:51:02.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_384_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_384_'ns_1@10.242.238.90'">>}]}, {move_state,640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_640_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_640_'ns_1@10.242.238.89'">>}]}, {move_state,385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_385_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_385_'ns_1@10.242.238.90'">>}]}, {move_state,641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_641_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_641_'ns_1@10.242.238.89'">>}]}, {move_state,386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_386_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_386_'ns_1@10.242.238.90'">>}]}, {move_state,642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_642_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_642_'ns_1@10.242.238.89'">>}]}, {move_state,896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_896_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_896_'ns_1@10.242.238.89'">>}]}, {move_state,387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_387_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_387_'ns_1@10.242.238.90'">>}]}, {move_state,643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_643_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_643_'ns_1@10.242.238.89'">>}]}, {move_state,897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_897_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_897_'ns_1@10.242.238.89'">>}]}, {move_state,388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_388_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_388_'ns_1@10.242.238.90'">>}]}, {move_state,644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_644_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_644_'ns_1@10.242.238.89'">>}]}, {move_state,898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_898_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_898_'ns_1@10.242.238.89'">>}]}, {move_state,389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_389_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_389_'ns_1@10.242.238.90'">>}]}, {move_state,645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_645_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_645_'ns_1@10.242.238.89'">>}]}, {move_state,899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_899_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_899_'ns_1@10.242.238.89'">>}]}, {move_state,390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_390_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_390_'ns_1@10.242.238.90'">>}]}, {move_state,646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_646_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_646_'ns_1@10.242.238.89'">>}]}, {move_state,900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_900_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_900_'ns_1@10.242.238.89'">>}]}, {move_state,391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_391_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_391_'ns_1@10.242.238.90'">>}]}, {move_state,647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_647_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_647_'ns_1@10.242.238.89'">>}]}, {move_state,901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_901_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_901_'ns_1@10.242.238.89'">>}]}, {move_state,392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_392_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_392_'ns_1@10.242.238.90'">>}]}, {move_state,648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_648_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_648_'ns_1@10.242.238.89'">>}]}, {move_state,902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_902_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_902_'ns_1@10.242.238.89'">>}]}, {move_state,393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_393_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_393_'ns_1@10.242.238.90'">>}]}, {move_state,649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_649_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_649_'ns_1@10.242.238.89'">>}]}, {move_state,903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_903_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_903_'ns_1@10.242.238.89'">>}]}, {move_state,394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_394_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_394_'ns_1@10.242.238.90'">>}]}, {move_state,650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_650_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_650_'ns_1@10.242.238.89'">>}]}, {move_state,904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_904_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_904_'ns_1@10.242.238.89'">>}]}, {move_state,395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_395_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_395_'ns_1@10.242.238.90'">>}]}, {move_state,651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_651_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_651_'ns_1@10.242.238.89'">>}]}, {move_state,905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_905_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_905_'ns_1@10.242.238.89'">>}]}, {move_state,396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_396_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_396_'ns_1@10.242.238.90'">>}]}, {move_state,652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_652_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_652_'ns_1@10.242.238.89'">>}]}, {move_state,906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_906_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_906_'ns_1@10.242.238.89'">>}]}, {move_state,397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_397_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_397_'ns_1@10.242.238.90'">>}]}, {move_state,653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_653_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_653_'ns_1@10.242.238.89'">>}]}, {move_state,907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_907_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_907_'ns_1@10.242.238.89'">>}]}, {move_state,398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_398_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_398_'ns_1@10.242.238.90'">>}]}, {move_state,654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_654_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_654_'ns_1@10.242.238.89'">>}]}, {move_state,908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_908_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_908_'ns_1@10.242.238.89'">>}]}, {move_state,399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_399_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_399_'ns_1@10.242.238.90'">>}]}, {move_state,655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_655_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_655_'ns_1@10.242.238.89'">>}]}, {move_state,909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_909_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_909_'ns_1@10.242.238.89'">>}]}, {move_state,400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_400_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_400_'ns_1@10.242.238.90'">>}]}, {move_state,656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_656_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_656_'ns_1@10.242.238.89'">>}]}, {move_state,910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_910_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_910_'ns_1@10.242.238.89'">>}]}, {move_state,401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_401_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_401_'ns_1@10.242.238.90'">>}]}, {move_state,657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_657_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_657_'ns_1@10.242.238.89'">>}]}, {move_state,911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_911_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_911_'ns_1@10.242.238.89'">>}]}, {move_state,402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_402_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_402_'ns_1@10.242.238.90'">>}]}, {move_state,658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_658_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_658_'ns_1@10.242.238.89'">>}]}, {move_state,912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_912_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_912_'ns_1@10.242.238.89'">>}]}, {move_state,403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_403_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_403_'ns_1@10.242.238.90'">>}]}, {move_state,659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_659_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_659_'ns_1@10.242.238.89'">>}]}, {move_state,913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_913_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_913_'ns_1@10.242.238.89'">>}]}, {move_state,404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_404_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_404_'ns_1@10.242.238.90'">>}]}, {move_state,660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_660_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_660_'ns_1@10.242.238.89'">>}]}, {move_state,914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_914_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_914_'ns_1@10.242.238.89'">>}]}, {move_state,405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_405_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_405_'ns_1@10.242.238.90'">>}]}, {move_state,661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_661_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_661_'ns_1@10.242.238.89'">>}]}, {move_state,915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_915_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_915_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:51:02.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 384, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 640, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:51:02.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 385, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 641, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 386, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 642, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 896, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 387, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 643, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 897, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 388, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 644, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 898, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 389, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 645, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 899, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 390, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 646, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 900, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 391, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 647, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:51:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 901, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 392, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 648, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 902, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 393, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 649, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 903, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 394, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 650, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 904, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 395, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 651, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 905, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 396, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 652, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 906, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 397, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 653, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 907, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:51:02.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 398, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 654, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 908, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 399, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 655, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 909, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 400, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 656, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 910, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 401, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 657, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 911, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 402, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 658, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 912, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 403, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 659, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 913, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 404, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:51:02.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 660, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 914, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 405, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:02.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 661, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:02.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 915, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:03.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 101. Nacking mccouch update. [views:debug,2014-08-19T16:51:03.033,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/101. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.034,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",101,active,0} [ns_server:debug,2014-08-19T16:51:03.035,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,845,662,534,479,351,168,896, 768,713,585,402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376, 193,921,793,738,610,427,299,244,116,972,844,661,533,478,350,167,895,712,584, 401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240, 112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631, 448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239,111,967, 839,656,528,473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319, 136,992,864,681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655, 
527,472,344,161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991, 863,680,552,497,369,186,914,786,731,603,420,292,237,109,965,837,654,526,471, 343,160,888,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679, 551,496,368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159, 887,704,576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495, 367,184,912,784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,703, 575,392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183, 911,783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263, 208,1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727, 599,416,288,233,105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103, 959,831,648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984, 673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 176,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903, 592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774, 719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406, 951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822, 767,456,145,690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638, 327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454, 143,999,688,377,922,611,300,245] [views:debug,2014-08-19T16:51:03.092,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/101. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.092,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",101,active,0} [ns_server:debug,2014-08-19T16:51:03.267,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 99. Nacking mccouch update. [views:debug,2014-08-19T16:51:03.267,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/99. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.268,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",99,active,0} [ns_server:debug,2014-08-19T16:51:03.270,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,115,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242, 114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633, 450,322,139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969, 841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321, 138,994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473, 345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,705, 577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721, 
410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,774,719,408,953, 642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513, 458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640,329, 874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145, 690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872,561, 506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999,688, 377,922,611,300,245,845,534,479,168] [views:debug,2014-08-19T16:51:03.351,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/99. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",99,active,0} [ns_server:debug,2014-08-19T16:51:03.535,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 97. Nacking mccouch update. [views:debug,2014-08-19T16:51:03.535,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/97. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.535,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",97,active,0} [ns_server:debug,2014-08-19T16:51:03.537,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,115,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242, 114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633, 450,322,139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969, 841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321, 138,994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473, 345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 
889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,705, 577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721, 410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719,408, 953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,147,692,381,1003,926,615,304,249,849,538,483,172,772,717,406,951,640, 329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456, 145,690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327,872, 561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999, 688,377,922,611,300,245,845,534,479,168] [views:debug,2014-08-19T16:51:03.618,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/97. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.619,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",97,active,0} [ns_server:debug,2014-08-19T16:51:03.766,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 95. Nacking mccouch update. [views:debug,2014-08-19T16:51:03.766,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/95. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.767,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",95,active,0} [ns_server:debug,2014-08-19T16:51:03.769,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,115,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242, 114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633, 450,322,139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969, 841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321, 138,994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473, 345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,705, 577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721, 
410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719,408, 953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406,951, 640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767, 456,145,690,379,1001,924,613,302,247,847,536,481,170,770,715,404,949,638,327, 872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143, 999,688,377,922,611,300,245,845,534,479,168] [views:debug,2014-08-19T16:51:03.809,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/95. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.809,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",95,active,0} [ns_server:debug,2014-08-19T16:51:03.884,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 93. Nacking mccouch update. [views:debug,2014-08-19T16:51:03.884,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/93. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.884,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",93,active,0} [ns_server:debug,2014-08-19T16:51:03.886,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,768,713,585,402, 274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738, 610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023, 946,818,763,635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,115,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945,817, 762,634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242, 114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633, 450,322,139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969, 841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321, 138,994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473, 345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 
889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,705, 577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721, 410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719,408, 953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406,951, 640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767, 456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949,638, 327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454, 143,999,688,377,922,611,300,245,845,534,479,168] [views:debug,2014-08-19T16:51:03.918,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/93. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.918,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",93,active,0} [ns_server:debug,2014-08-19T16:51:03.993,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 91. Nacking mccouch update. [views:debug,2014-08-19T16:51:03.993,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/91. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:03.993,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",91,active,0} [ns_server:debug,2014-08-19T16:51:03.995,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,91,896,768,713,585, 402,274,219,947,819,764,636,453,325,142,998,870,687,559,504,376,193,921,793, 738,610,427,299,244,116,972,844,661,533,478,350,167,895,712,584,401,273,218, 1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920,792,737,609, 426,298,243,115,971,843,660,532,477,349,166,894,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297, 242,114,970,842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761, 633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113, 969,841,658,530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449, 321,138,994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840, 657,529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137, 993,865,682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528, 473,345,162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344, 161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552, 497,369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888, 705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368, 185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576, 393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912, 784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728, 600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776, 
721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281, 226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719, 408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406, 951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822, 767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949, 638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765, 454,143,999,688,377,922,611,300,245,845,534,479,168] [views:debug,2014-08-19T16:51:04.027,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/91. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.027,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",91,active,0} [ns_server:debug,2014-08-19T16:51:04.102,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 89. Nacking mccouch update. [views:debug,2014-08-19T16:51:04.102,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/89. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.102,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",89,active,0} [ns_server:debug,2014-08-19T16:51:04.104,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219,947, 819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299, 244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763, 635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115, 971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,709,581,398,270,215,1020,943,815,760,632,449,321,138,994, 866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162, 890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 
578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,705,577,394, 266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913,785, 730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210, 1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601, 418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600,417,289, 234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753,625, 442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961, 833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441,313, 130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832,649,521, 466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520,465,337, 154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285,230, 830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412,101, 957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228,828, 517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721,410,955, 644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826,515, 460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719,408,953,642, 331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513,458, 147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406,951,640,329, 874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456,145, 690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949,638,327,872, 561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143,999, 688,377,922,611,300,245,845,534,479,168,91,768,713,402] [views:debug,2014-08-19T16:51:04.136,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/89. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.137,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",89,active,0} [ns_server:debug,2014-08-19T16:51:04.228,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 87. Nacking mccouch update. [views:debug,2014-08-19T16:51:04.228,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/87. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.229,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",87,active,0} [ns_server:debug,2014-08-19T16:51:04.230,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219,947, 819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299, 244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763, 635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115, 971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529, 474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345, 162,890,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553, 498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889, 706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369, 186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913, 785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265, 210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729, 601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264,209,1014, 937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832,649, 521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520,465, 337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596,285, 230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283,228, 828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721,410, 
955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226,826, 515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719,408,953, 642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824,513, 458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406,951,640, 329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767,456, 145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949,638,327, 872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454,143, 999,688,377,922,611,300,245,845,534,479,168,91,768,713,402] [rebalance:info,2014-08-19T16:51:04.253,ns_1@10.242.238.88:<0.14662.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 398 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:04.253,ns_1@10.242.238.88:<0.14585.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 399 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:04.254,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 398 state to active [rebalance:info,2014-08-19T16:51:04.255,ns_1@10.242.238.88:<0.14662.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 398 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:04.255,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 399 state to active [rebalance:info,2014-08-19T16:51:04.256,ns_1@10.242.238.88:<0.14585.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 399 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:04.256,ns_1@10.242.238.88:<0.14662.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:04.257,ns_1@10.242.238.88:<0.14585.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:04.287,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/87. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",87,active,0} [rebalance:info,2014-08-19T16:51:04.406,ns_1@10.242.238.88:<0.14522.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 400 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:04.406,ns_1@10.242.238.88:<0.14445.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 401 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:04.406,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 400 state to active [rebalance:info,2014-08-19T16:51:04.409,ns_1@10.242.238.88:<0.14522.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 400 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:04.409,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 401 state to active [rebalance:info,2014-08-19T16:51:04.410,ns_1@10.242.238.88:<0.14445.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 401 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:04.410,ns_1@10.242.238.88:<0.14522.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:04.410,ns_1@10.242.238.88:<0.14445.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:04.444,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 85. Nacking mccouch update. [views:debug,2014-08-19T16:51:04.444,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/85. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.444,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",85,active,0} [ns_server:debug,2014-08-19T16:51:04.447,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219,947, 819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299, 244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763, 635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115, 971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529, 474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345, 162,890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,705, 577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721, 
410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719,408, 953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406,951, 640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767, 456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949,638, 327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454, 143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402] [views:debug,2014-08-19T16:51:04.511,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/85. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.512,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",85,active,0} [rebalance:info,2014-08-19T16:51:04.536,ns_1@10.242.238.88:<0.14368.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 402 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:04.536,ns_1@10.242.238.88:<0.14291.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 403 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:04.537,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 402 state to active [rebalance:info,2014-08-19T16:51:04.538,ns_1@10.242.238.88:<0.14368.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 402 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:04.538,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 403 state to active [rebalance:info,2014-08-19T16:51:04.539,ns_1@10.242.238.88:<0.14291.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 403 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:04.539,ns_1@10.242.238.88:<0.14368.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:04.539,ns_1@10.242.238.88:<0.14291.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:04.595,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 83. Nacking mccouch update. [views:debug,2014-08-19T16:51:04.595,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/83. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.595,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",83,active,0} [ns_server:debug,2014-08-19T16:51:04.597,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219,947, 819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299, 244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763, 635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115, 971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529, 474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345, 162,890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83, 705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368, 185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576, 393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912, 784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728, 600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776, 
721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281, 226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719, 408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406, 951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822, 767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949, 638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765, 454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402] [views:debug,2014-08-19T16:51:04.646,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/83. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.646,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",83,active,0} [rebalance:info,2014-08-19T16:51:04.662,ns_1@10.242.238.88:<0.14136.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 405 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:04.662,ns_1@10.242.238.88:<0.14214.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 404 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:04.662,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 405 state to active [rebalance:info,2014-08-19T16:51:04.663,ns_1@10.242.238.88:<0.14136.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 405 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:04.664,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 404 state to active [rebalance:info,2014-08-19T16:51:04.665,ns_1@10.242.238.88:<0.14214.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 404 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:04.665,ns_1@10.242.238.88:<0.14136.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:04.665,ns_1@10.242.238.88:<0.14214.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:04.762,ns_1@10.242.238.88:<0.14781.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 652 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:04.762,ns_1@10.242.238.88:<0.14704.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 653 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:04.763,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 653 state to active [rebalance:info,2014-08-19T16:51:04.764,ns_1@10.242.238.88:<0.14704.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 653 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:04.764,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 652 state to active [rebalance:info,2014-08-19T16:51:04.765,ns_1@10.242.238.88:<0.14781.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 652 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:51:04.765,ns_1@10.242.238.88:<0.14704.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:04.765,ns_1@10.242.238.88:<0.14781.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:04.814,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 81. Nacking mccouch update. [views:debug,2014-08-19T16:51:04.814,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/81. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.814,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",81,active,0} [ns_server:debug,2014-08-19T16:51:04.816,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219,947, 819,764,636,453,325,142,998,870,687,559,504,376,193,921,793,738,610,427,299, 244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763, 635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115, 971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529, 474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345, 162,890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83, 705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368, 185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576, 393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912, 784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 
936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,105,961,833,650,522,467,339,156,884,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831, 648,520,465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99, 776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774, 719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717, 406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222, 822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404, 949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820, 765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402] [views:debug,2014-08-19T16:51:04.898,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/81. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:04.898,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",81,active,0} [rebalance:info,2014-08-19T16:51:04.921,ns_1@10.242.238.88:<0.14641.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 654 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:04.921,ns_1@10.242.238.88:<0.14564.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 655 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:04.921,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 654 state to active [rebalance:info,2014-08-19T16:51:04.922,ns_1@10.242.238.88:<0.14641.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 654 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:04.922,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 655 state to active [rebalance:info,2014-08-19T16:51:04.923,ns_1@10.242.238.88:<0.14564.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 655 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:04.924,ns_1@10.242.238.88:<0.14641.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:04.924,ns_1@10.242.238.88:<0.14564.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.038,ns_1@10.242.238.88:<0.14424.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 657 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.038,ns_1@10.242.238.88:<0.14495.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 656 state change: {'ns_1@10.242.238.88',active,paused,undefined} 
[ns_server:info,2014-08-19T16:51:05.039,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 657 state to active [rebalance:info,2014-08-19T16:51:05.040,ns_1@10.242.238.88:<0.14424.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 657 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.040,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 656 state to active [rebalance:info,2014-08-19T16:51:05.041,ns_1@10.242.238.88:<0.14495.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 656 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.041,ns_1@10.242.238.88:<0.14424.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.041,ns_1@10.242.238.88:<0.14495.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:05.073,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 79. Nacking mccouch update. [views:debug,2014-08-19T16:51:05.073,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/79. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.073,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",79,active,0} [ns_server:debug,2014-08-19T16:51:05.075,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219,819, 764,453,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116,972, 844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324, 141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660, 532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140, 996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867, 684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347, 164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370, 
187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578, 395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914, 786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394, 266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913,785, 730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210, 1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601, 418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014, 937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,699,571,388,260,205,1010,805,750,439,128,984,673,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721, 410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281,226, 826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719,408, 953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224,824, 513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406,951, 640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822,767, 456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949,638, 327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765,454, 143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402,947,636,325] [views:debug,2014-08-19T16:51:05.157,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/79. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.157,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",79,active,0} [rebalance:info,2014-08-19T16:51:05.175,ns_1@10.242.238.88:<0.14270.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 659 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.175,ns_1@10.242.238.88:<0.14347.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 658 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.176,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 659 state to active [rebalance:info,2014-08-19T16:51:05.177,ns_1@10.242.238.88:<0.14270.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 659 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.177,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 658 state to active [rebalance:info,2014-08-19T16:51:05.178,ns_1@10.242.238.88:<0.14347.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 658 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.178,ns_1@10.242.238.88:<0.14270.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.178,ns_1@10.242.238.88:<0.14347.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.268,ns_1@10.242.238.88:<0.14193.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 660 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.268,ns_1@10.242.238.88:<0.14101.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 661 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.268,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 660 state to active [rebalance:info,2014-08-19T16:51:05.269,ns_1@10.242.238.88:<0.14193.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 660 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.269,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 661 state to active [rebalance:info,2014-08-19T16:51:05.270,ns_1@10.242.238.88:<0.14101.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 661 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.271,ns_1@10.242.238.88:<0.14193.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.271,ns_1@10.242.238.88:<0.14101.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:05.278,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 77. Nacking mccouch update. [views:debug,2014-08-19T16:51:05.278,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/77. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.279,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",77,active,0} [ns_server:debug,2014-08-19T16:51:05.281,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,697,386,1008,931,620,309, 254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436,125, 981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252,852, 541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979,668, 357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539,484, 173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355,900, 589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171,771, 716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587,276, 221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714,403, 948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219,819, 764,453,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116,972, 844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324, 141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660, 532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140, 996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867, 684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347, 164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370, 187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578, 395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914, 786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394, 266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913,785, 730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210, 1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601, 418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014, 937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776, 
721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281, 226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719, 408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406, 951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822, 767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949, 638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765, 454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402,947,636, 325] [views:debug,2014-08-19T16:51:05.312,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/77. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.313,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",77,active,0} [ns_server:debug,2014-08-19T16:51:05.389,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 75. Nacking mccouch update. [views:debug,2014-08-19T16:51:05.389,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/75. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.389,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",75,active,0} [rebalance:info,2014-08-19T16:51:05.393,ns_1@10.242.238.88:<0.14760.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 906 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:51:05.392,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,695,384,1006,929,618,307,252, 852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123,979, 668,357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850,539, 484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666,355, 900,589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482,171, 771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898,587, 276,221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769,714, 403,948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274,219, 819,764,453,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244,116, 972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452, 324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843, 660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659, 531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475, 347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866, 683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346, 163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554, 
499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890, 85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913, 785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265, 210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729, 601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99, 776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774, 719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717, 406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222, 822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404, 949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820, 765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402,947, 636,325] [rebalance:info,2014-08-19T16:51:05.393,ns_1@10.242.238.88:<0.14683.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 907 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.393,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 906 state to active [rebalance:info,2014-08-19T16:51:05.395,ns_1@10.242.238.88:<0.14760.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 906 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.395,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 907 state to active [rebalance:info,2014-08-19T16:51:05.396,ns_1@10.242.238.88:<0.14683.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 907 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.396,ns_1@10.242.238.88:<0.14760.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.396,ns_1@10.242.238.88:<0.14683.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:05.422,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for 
default/75. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.423,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",75,active,0} [rebalance:info,2014-08-19T16:51:05.460,ns_1@10.242.238.88:<0.14620.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 908 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.460,ns_1@10.242.238.88:<0.14543.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 909 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.461,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 908 state to active [rebalance:info,2014-08-19T16:51:05.462,ns_1@10.242.238.88:<0.14620.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 908 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.462,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 909 state to active [rebalance:info,2014-08-19T16:51:05.463,ns_1@10.242.238.88:<0.14543.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 909 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.463,ns_1@10.242.238.88:<0.14620.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.464,ns_1@10.242.238.88:<0.14543.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.544,ns_1@10.242.238.88:<0.14395.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 911 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.544,ns_1@10.242.238.88:<0.14466.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 910 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.544,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 911 state to active [rebalance:info,2014-08-19T16:51:05.545,ns_1@10.242.238.88:<0.14395.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 911 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.545,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 910 state to active [rebalance:info,2014-08-19T16:51:05.546,ns_1@10.242.238.88:<0.14466.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 910 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.547,ns_1@10.242.238.88:<0.14395.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.547,ns_1@10.242.238.88:<0.14466.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:05.598,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 73. Nacking mccouch update. [views:debug,2014-08-19T16:51:05.598,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/73. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.598,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",73,active,0} [ns_server:debug,2014-08-19T16:51:05.600,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,693,382,1004,927,616,305,250,850, 539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977,666, 355,900,589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537,482, 171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353,898, 587,276,221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169,769, 714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585,274, 219,819,764,453,142,998,870,687,559,504,376,193,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971, 843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451, 323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842, 659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139, 995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530, 475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994, 866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162, 890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553, 498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889, 706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369, 186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705, 577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728, 600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831, 648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903, 592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97, 774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772, 717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277, 222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715, 404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220, 820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402, 947,636,325] [views:debug,2014-08-19T16:51:05.632,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/73. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.632,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",73,active,0} [rebalance:info,2014-08-19T16:51:05.663,ns_1@10.242.238.88:<0.14243.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 913 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.663,ns_1@10.242.238.88:<0.14312.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 912 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.663,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 913 state to active [rebalance:info,2014-08-19T16:51:05.665,ns_1@10.242.238.88:<0.14243.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 913 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.665,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 912 state to active [rebalance:info,2014-08-19T16:51:05.666,ns_1@10.242.238.88:<0.14312.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 912 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.666,ns_1@10.242.238.88:<0.14243.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.667,ns_1@10.242.238.88:<0.14312.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:05.707,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 71. Nacking mccouch update. [views:debug,2014-08-19T16:51:05.707,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/71. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.707,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",71,active,0} [ns_server:debug,2014-08-19T16:51:05.709,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,380,1002,925,614,303,248,848,537, 482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664,353, 898,587,276,221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480,169, 769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351,896,585, 274,219,819,764,453,142,998,870,687,559,504,376,193,921,793,738,610,427,299, 244,116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763, 635,452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115, 971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634, 451,323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529, 474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345, 162,890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83, 705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368, 185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576, 393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912, 784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959, 831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984, 673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358, 903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174, 97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901, 590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95, 772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588, 277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770, 715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275, 220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713, 402,947,636,325] [views:debug,2014-08-19T16:51:05.741,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/71. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.741,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",71,active,0} [rebalance:info,2014-08-19T16:51:05.778,ns_1@10.242.238.88:<0.14080.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 915 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.778,ns_1@10.242.238.88:<0.14157.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 914 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.779,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 915 state to active [rebalance:info,2014-08-19T16:51:05.780,ns_1@10.242.238.88:<0.14080.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 915 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.781,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 914 state to active [rebalance:info,2014-08-19T16:51:05.782,ns_1@10.242.238.88:<0.14157.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 914 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.783,ns_1@10.242.238.88:<0.14080.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.783,ns_1@10.242.238.88:<0.14157.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:05.816,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 69. Nacking mccouch update. [views:debug,2014-08-19T16:51:05.816,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/69. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.816,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",69,active,0} [ns_server:debug,2014-08-19T16:51:05.818,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,378,1000,923,612,301,246,846,535,480, 169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351,896, 585,274,219,819,764,453,142,998,687,376,921,793,738,610,427,299,244,116,972, 844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324, 141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660, 532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140, 996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531, 476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867, 684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347, 164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498,370, 187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578, 395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914, 786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394, 266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913,785, 730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210, 1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601, 418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014, 937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776, 
721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592,281, 226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719, 408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279,224, 824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717,406, 951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222,822, 767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404,949, 638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820,765, 454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402,947,636, 325,870,559,504,193] [rebalance:info,2014-08-19T16:51:05.864,ns_1@10.242.238.88:<0.15679.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 384 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.864,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 384 state to active [rebalance:info,2014-08-19T16:51:05.864,ns_1@10.242.238.88:<0.15636.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 385 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.865,ns_1@10.242.238.88:<0.15679.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 384 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.865,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 385 state to active [views:debug,2014-08-19T16:51:05.866,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/69. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:05.866,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",69,active,0} [rebalance:info,2014-08-19T16:51:05.867,ns_1@10.242.238.88:<0.15636.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 385 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:05.867,ns_1@10.242.238.88:<0.15679.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.867,ns_1@10.242.238.88:<0.15636.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.989,ns_1@10.242.238.88:<0.15583.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 386 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:05.989,ns_1@10.242.238.88:<0.15503.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 387 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:05.989,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 386 state to active [rebalance:info,2014-08-19T16:51:05.990,ns_1@10.242.238.88:<0.15583.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 386 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:05.990,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 387 state to active [rebalance:info,2014-08-19T16:51:05.991,ns_1@10.242.238.88:<0.15503.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 387 on ns_1@10.242.238.88 
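[editor's note] The capi_set_view_manager entries above dump the complete set of vbuckets the view manager currently treats as usable on this node; consecutive dumps differ only by the vbuckets whose state just changed (69 joins the set here, 67 and 65 follow below). A minimal sketch, assuming nothing beyond the log text itself, for diffing two such dumps — the helper name and the truncated sample lists are hypothetical, not Couchbase code:

import re

def usable_vbuckets(entry):
    """Return the set of vbucket ids listed after 'Usable vbuckets:'."""
    _, _, tail = entry.partition("Usable vbuckets:")
    return {int(n) for n in re.findall(r"\d+", tail)}

before = usable_vbuckets("Usable vbuckets: [933,622,311,71]")
after  = usable_vbuckets("Usable vbuckets: [933,622,311,71,69]")
print(sorted(after - before))   # -> [69], the vbucket that just became usable
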
[rebalance:info,2014-08-19T16:51:05.991,ns_1@10.242.238.88:<0.15583.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:05.992,ns_1@10.242.238.88:<0.15503.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:06.024,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 67. Nacking mccouch update. [views:debug,2014-08-19T16:51:06.024,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/67. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.025,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",67,active,0} [ns_server:debug,2014-08-19T16:51:06.026,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,376,921,793,738,610,427,299,244,116, 972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452, 324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843, 660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323, 140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659, 531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995, 867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475, 347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866, 683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346, 163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554, 499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890, 85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913, 785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265, 210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729, 601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 
753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99, 776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774, 719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717, 406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222, 822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404, 949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820, 765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402,947, 636,325,870,559,504,193] [views:debug,2014-08-19T16:51:06.108,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/67. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.109,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",67,active,0} [rebalance:info,2014-08-19T16:51:06.156,ns_1@10.242.238.88:<0.15426.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 388 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:06.156,ns_1@10.242.238.88:<0.15349.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 389 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:06.158,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 388 state to active [rebalance:info,2014-08-19T16:51:06.159,ns_1@10.242.238.88:<0.15426.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 388 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:06.159,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 389 state to active [rebalance:info,2014-08-19T16:51:06.160,ns_1@10.242.238.88:<0.15349.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 389 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:06.161,ns_1@10.242.238.88:<0.15426.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:06.161,ns_1@10.242.238.88:<0.15349.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:06.283,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 65. Nacking mccouch update. [views:debug,2014-08-19T16:51:06.283,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/65. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.284,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",65,active,0} [ns_server:debug,2014-08-19T16:51:06.286,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971, 843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451, 323,140,996,868,685,557,502,374,191,919,791,736,608,425,297,242,114,970,842, 659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139, 995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530, 475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994, 866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474, 346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682, 554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162, 890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,553, 498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889, 706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369, 186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705, 577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728, 600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831, 648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903, 592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97, 774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772, 717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277, 222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715, 404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220, 820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402, 947,636,325,870,559,504,193] [rebalance:info,2014-08-19T16:51:06.323,ns_1@10.242.238.88:<0.15209.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 391 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:06.323,ns_1@10.242.238.88:<0.15286.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 390 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:06.323,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 391 state to active [rebalance:info,2014-08-19T16:51:06.324,ns_1@10.242.238.88:<0.15209.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 391 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:06.325,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 390 state to active [rebalance:info,2014-08-19T16:51:06.326,ns_1@10.242.238.88:<0.15286.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 390 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:06.326,ns_1@10.242.238.88:<0.15209.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:06.326,ns_1@10.242.238.88:<0.15286.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:06.367,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/65. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.367,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",65,active,0} [rebalance:info,2014-08-19T16:51:06.490,ns_1@10.242.238.88:<0.15129.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 392 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:06.490,ns_1@10.242.238.88:<0.15052.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 393 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:06.491,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 392 state to active [rebalance:info,2014-08-19T16:51:06.492,ns_1@10.242.238.88:<0.15129.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 392 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:06.492,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 393 state to active [rebalance:info,2014-08-19T16:51:06.493,ns_1@10.242.238.88:<0.15052.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 393 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:06.493,ns_1@10.242.238.88:<0.15129.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:06.494,ns_1@10.242.238.88:<0.15052.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:06.542,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 63. Nacking mccouch update. [views:debug,2014-08-19T16:51:06.543,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/63. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.543,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",63,active,0} [ns_server:debug,2014-08-19T16:51:06.545,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971, 843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451, 323,140,996,868,685,63,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529, 474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865, 682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345, 162,890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681, 553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161, 889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497, 369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83, 705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368, 185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576, 393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912, 784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392, 264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959, 831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984, 673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358, 903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174, 97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901, 590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95, 772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588, 277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770, 715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275, 220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713, 402,947,636,325,870,559,504,193] [views:debug,2014-08-19T16:51:06.627,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/63. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.627,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",63,active,0} [rebalance:info,2014-08-19T16:51:06.658,ns_1@10.242.238.88:<0.14879.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 395 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:06.658,ns_1@10.242.238.88:<0.14956.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 394 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:06.658,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 395 state to active [rebalance:info,2014-08-19T16:51:06.659,ns_1@10.242.238.88:<0.14879.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 395 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:06.659,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 394 state to active [rebalance:info,2014-08-19T16:51:06.660,ns_1@10.242.238.88:<0.14956.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 394 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:06.661,ns_1@10.242.238.88:<0.14879.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:06.661,ns_1@10.242.238.88:<0.14956.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 61. Nacking mccouch update. [views:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/61. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.802,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",61,active,0} [ns_server:debug,2014-08-19T16:51:06.804,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,793,738,610,427,299,244, 116,972,844,661,533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635, 452,324,141,997,869,686,558,503,375,192,920,792,737,609,426,298,243,115,971, 843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451, 323,140,996,868,685,63,557,502,374,191,919,791,736,608,425,297,242,114,970, 842,659,531,476,348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322, 139,995,867,684,556,501,373,190,918,790,735,607,424,296,241,113,969,841,658, 530,475,347,164,892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138, 994,866,683,61,555,500,372,189,917,789,734,606,423,295,240,112,968,840,657, 529,474,346,163,891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993, 865,682,554,499,371,188,916,788,733,605,422,294,239,111,967,839,656,528,473, 345,162,890,85,707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864, 681,553,498,370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344, 161,889,706,578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552, 497,369,186,914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888, 83,705,577,394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496, 368,185,913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704, 576,393,265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184, 912,784,729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575, 392,264,209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911, 783,728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208, 1013,936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599, 416,288,233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012, 935,807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103, 959,831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128, 984,673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855, 544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982, 671,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542, 
487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356, 901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172, 95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899, 588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93, 770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586, 275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768, 713,402,947,636,325,870,559,504,193] [rebalance:info,2014-08-19T16:51:06.816,ns_1@10.242.238.88:<0.16840.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 396) [rebalance:info,2014-08-19T16:51:06.816,ns_1@10.242.238.88:<0.14739.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 397 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:06.817,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 397 state to active [rebalance:info,2014-08-19T16:51:06.817,ns_1@10.242.238.88:<0.14802.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:06.818,ns_1@10.242.238.88:<0.14739.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 397 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:06.818,ns_1@10.242.238.88:<0.14739.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:06.820,ns_1@10.242.238.88:<0.14810.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_396_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:06.820,ns_1@10.242.238.88:<0.14802.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:06.823,ns_1@10.242.238.88:<0.14802.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 396 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.16847.1> [ns_server:info,2014-08-19T16:51:06.824,ns_1@10.242.238.88:<0.16847.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 396 to state replica [ns_server:debug,2014-08-19T16:51:06.854,ns_1@10.242.238.88:<0.16847.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_396 [rebalance:info,2014-08-19T16:51:06.856,ns_1@10.242.238.88:<0.16847.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[396]}, {checkpoints,[{396,1}]}, {name,<<"rebalance_396">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[396]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"396"}]} [rebalance:debug,2014-08-19T16:51:06.857,ns_1@10.242.238.88:<0.16847.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.16848.1> [rebalance:info,2014-08-19T16:51:06.858,ns_1@10.242.238.88:<0.16847.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:06.859,ns_1@10.242.238.88:<0.16847.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:06.859,ns_1@10.242.238.88:<0.16847.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for 
successfull takover [rebalance:info,2014-08-19T16:51:06.861,ns_1@10.242.238.88:<0.14802.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 396 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:06.862,ns_1@10.242.238.88:<0.14810.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:06.867,ns_1@10.242.238.88:<0.14810.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_396_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:06.867,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 396 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:06.868,ns_1@10.242.238.88:<0.16852.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 396 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [views:debug,2014-08-19T16:51:06.886,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/61. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:06.887,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",61,active,0} [ns_server:debug,2014-08-19T16:51:06.890,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.890,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:06.890,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{396, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:06.890,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.891,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:06.891,ns_1@10.242.238.88:<0.16856.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 399) [rebalance:info,2014-08-19T16:51:06.891,ns_1@10.242.238.88:<0.16857.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 400) [rebalance:info,2014-08-19T16:51:06.891,ns_1@10.242.238.88:<0.16858.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 401) [rebalance:info,2014-08-19T16:51:06.891,ns_1@10.242.238.88:<0.16859.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 398) [rebalance:info,2014-08-19T16:51:06.892,ns_1@10.242.238.88:<0.14585.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:06.892,ns_1@10.242.238.88:<0.14522.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:06.892,ns_1@10.242.238.88:<0.14445.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:06.893,ns_1@10.242.238.88:<0.14662.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:51:06.896,ns_1@10.242.238.88:<0.14593.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_399_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:06.896,ns_1@10.242.238.88:<0.14585.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:06.896,ns_1@10.242.238.88:<0.14453.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_401_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:06.897,ns_1@10.242.238.88:<0.14445.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:06.897,ns_1@10.242.238.88:<0.14530.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_400_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:06.897,ns_1@10.242.238.88:<0.14522.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:06.897,ns_1@10.242.238.88:<0.14670.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_398_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:06.897,ns_1@10.242.238.88:<0.14662.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:06.899,ns_1@10.242.238.88:<0.14585.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 399 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.16873.1> [ns_server:info,2014-08-19T16:51:06.900,ns_1@10.242.238.88:<0.16873.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 399 to state replica [ns_server:debug,2014-08-19T16:51:06.900,ns_1@10.242.238.88:<0.14522.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 400 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.16874.1> [ns_server:info,2014-08-19T16:51:06.901,ns_1@10.242.238.88:<0.16874.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 400 to state replica [ns_server:debug,2014-08-19T16:51:06.902,ns_1@10.242.238.88:<0.14445.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 401 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.16875.1> [rebalance:info,2014-08-19T16:51:06.902,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 396 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:06.902,ns_1@10.242.238.88:<0.14662.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 398 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.16876.1> [ns_server:debug,2014-08-19T16:51:06.903,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 396) [ns_server:info,2014-08-19T16:51:06.903,ns_1@10.242.238.88:<0.16875.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 401 to state replica [ns_server:info,2014-08-19T16:51:06.903,ns_1@10.242.238.88:<0.16876.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 398 to state replica [ns_server:debug,2014-08-19T16:51:06.903,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:51:06.925,ns_1@10.242.238.88:<0.16873.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_399 [rebalance:info,2014-08-19T16:51:06.928,ns_1@10.242.238.88:<0.16873.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[399]}, {checkpoints,[{399,1}]}, {name,<<"rebalance_399">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[399]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"399"}]} [rebalance:debug,2014-08-19T16:51:06.929,ns_1@10.242.238.88:<0.16873.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.16878.1> [rebalance:info,2014-08-19T16:51:06.930,ns_1@10.242.238.88:<0.16873.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:06.932,ns_1@10.242.238.88:<0.16873.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:06.932,ns_1@10.242.238.88:<0.16873.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:06.933,ns_1@10.242.238.88:<0.14585.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 399 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:06.934,ns_1@10.242.238.88:<0.14593.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:06.937,ns_1@10.242.238.88:<0.14593.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_399_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:06.938,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 399 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:06.938,ns_1@10.242.238.88:<0.16882.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 399 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:06.939,ns_1@10.242.238.88:<0.16875.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_401 [rebalance:info,2014-08-19T16:51:06.941,ns_1@10.242.238.88:<0.16875.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[401]}, {checkpoints,[{401,1}]}, {name,<<"rebalance_401">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[401]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"401"}]} 
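[editor's note] Each single-vbucket move in these entries goes through the same phases: the old master's copy is set active/paused, the persistence checkpoint id is looked up, the mover waits for that checkpoint on the replicas and for the index on the future master, the replication builders are shut down, a takeover TAP stream named rebalance_<vbucket> transfers ownership, the new master is set active, the remaining replicas are re-pointed, and the old copy is scheduled for deletion. A rough sketch (hypothetical helper, not Couchbase code) that tags an entry with its phase, keyed on the module:function:line header visible in every entry above:

import re

PHASES = {
    "janitor_agent:set_vbucket_state": "vbucket state change",
    "janitor_agent:get_replication_persistence_checkpoint_id": "checkpoint lookup",
    "janitor_agent:wait_index_updated": "wait for index",
    "ns_single_vbucket_mover:mover_inner": "mover progress",
    "ebucketmigrator_srv:init": "takeover TAP stream",
    "ns_vbucket_mover:on_move_done": "move finished",
}

def phase(entry):
    m = re.search(r"(\w+:\w+):\d+\]", entry)
    return PHASES.get(m.group(1), "other") if m else "no header"

print(phase("[rebalance:info,2014-08-19T16:51:06.856,ns_1@10.242.238.88:"
            "<0.16847.1>:ebucketmigrator_srv:init:603]Starting tap stream: ..."))
# -> 'takeover TAP stream'
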
[rebalance:debug,2014-08-19T16:51:06.941,ns_1@10.242.238.88:<0.16875.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.16883.1> [rebalance:info,2014-08-19T16:51:06.942,ns_1@10.242.238.88:<0.16875.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:06.945,ns_1@10.242.238.88:<0.16875.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:06.945,ns_1@10.242.238.88:<0.16875.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:06.946,ns_1@10.242.238.88:<0.14445.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 401 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:06.947,ns_1@10.242.238.88:<0.14453.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:06.951,ns_1@10.242.238.88:<0.14453.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_401_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:06.952,ns_1@10.242.238.88:<0.16874.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_400 [rebalance:info,2014-08-19T16:51:06.954,ns_1@10.242.238.88:<0.16874.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[400]}, {checkpoints,[{400,1}]}, {name,<<"rebalance_400">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[400]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"400"}]} [rebalance:debug,2014-08-19T16:51:06.954,ns_1@10.242.238.88:<0.16874.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.16886.1> [ns_server:debug,2014-08-19T16:51:06.955,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:06.956,ns_1@10.242.238.88:<0.16874.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:06.956,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{399, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:06.956,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:06.957,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.957,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:06.959,ns_1@10.242.238.88:<0.16874.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:06.959,ns_1@10.242.238.88:<0.16874.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:06.962,ns_1@10.242.238.88:<0.14522.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 400 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:06.965,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 399 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:06.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 399) [rebalance:debug,2014-08-19T16:51:06.966,ns_1@10.242.238.88:<0.14530.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:06.966,ns_1@10.242.238.88:<0.16876.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_398 [ns_server:debug,2014-08-19T16:51:06.966,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:06.966,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 401 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:06.967,ns_1@10.242.238.88:<0.16897.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 401 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:51:06.967,ns_1@10.242.238.88:<0.16876.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[398]}, {checkpoints,[{398,1}]}, {name,<<"rebalance_398">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[398]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"398"}]} [rebalance:debug,2014-08-19T16:51:06.968,ns_1@10.242.238.88:<0.16876.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.16898.1> [ns_server:info,2014-08-19T16:51:06.969,ns_1@10.242.238.88:<0.14530.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_400_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:06.969,ns_1@10.242.238.88:<0.16876.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:06.970,ns_1@10.242.238.88:<0.16876.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:06.970,ns_1@10.242.238.88:<0.16876.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:06.972,ns_1@10.242.238.88:<0.14662.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 398 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:06.973,ns_1@10.242.238.88:<0.14670.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
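[editor's note] Each replica being rebuilt gets its own TAP connection, named replication_building_<vbucket>_'<destination node>'; when the mover shuts the replication builders down, those names are killed, as in the "Killed the following tap names" entries above. A throwaway sketch, assuming only the naming visible in this log, for decoding such a name (the helper is hypothetical):

import re

def parse_builder_tap(name):
    m = re.match(r"replication_building_(\d+)_'([^']+)'", name)
    if not m:
        return None
    return int(m.group(1)), m.group(2)

print(parse_builder_tap("replication_building_400_'ns_1@10.242.238.90'"))
# -> (400, 'ns_1@10.242.238.90')
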
[rebalance:info,2014-08-19T16:51:06.975,ns_1@10.242.238.88:<0.16912.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 402) [rebalance:info,2014-08-19T16:51:06.975,ns_1@10.242.238.88:<0.16913.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 403) [rebalance:info,2014-08-19T16:51:06.976,ns_1@10.242.238.88:<0.14368.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:06.976,ns_1@10.242.238.88:<0.14291.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:51:06.976,ns_1@10.242.238.88:<0.14670.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_398_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:06.979,ns_1@10.242.238.88:<0.14376.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_402_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:06.979,ns_1@10.242.238.88:<0.14368.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:06.979,ns_1@10.242.238.88:<0.14299.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_403_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:06.980,ns_1@10.242.238.88:<0.14291.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:06.982,ns_1@10.242.238.88:<0.14368.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 402 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.16923.1> [ns_server:debug,2014-08-19T16:51:06.982,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.983,ns_1@10.242.238.88:<0.14291.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 403 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.16924.1> [ns_server:debug,2014-08-19T16:51:06.983,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:info,2014-08-19T16:51:06.983,ns_1@10.242.238.88:<0.16923.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 402 to state replica [ns_server:info,2014-08-19T16:51:06.983,ns_1@10.242.238.88:<0.16924.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 403 to state replica [ns_server:debug,2014-08-19T16:51:06.983,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:06.984,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{401, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:06.984,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:06.990,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 401 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:06.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 401) [ns_server:debug,2014-08-19T16:51:06.991,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:06.992,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 400 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:06.992,ns_1@10.242.238.88:<0.16935.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 400 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:07.008,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.008,ns_1@10.242.238.88:<0.16923.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_402 [ns_server:debug,2014-08-19T16:51:07.008,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.008,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
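[editor's note] The "config change: buckets" entries above record a per-vbucket map delta as {VBucket, OldChain, NewChain}; for vbucket 401, for instance, the chain moves from ['ns_1@10.242.238.88',undefined] to ['ns_1@10.242.238.89','ns_1@10.242.238.90']. A hedged sketch of reading such a delta — the field names are mine, and the old-vs-new interpretation is inferred from the surrounding mover entries rather than taken from Couchbase documentation:

# Hypothetical representation of the map delta logged above for vbucket 401;
# chain order (active first, then replica) is inferred from the mover entries.
move = {
    "vbucket": 401,
    "old_chain": ["ns_1@10.242.238.88", None],                  # active on .88, no replica yet
    "new_chain": ["ns_1@10.242.238.89", "ns_1@10.242.238.90"],  # active on .89, replica on .90
}

old = {n for n in move["old_chain"] if n}
new = {n for n in move["new_chain"] if n}
print(sorted(new - old))  # nodes that need a copy built during the move
print(sorted(old - new))  # nodes whose copy is deleted once the move is done
                          # matches "Moving vbucket 401 done. Will delete it on: ['ns_1@10.242.238.88']"
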
[ns_server:debug,2014-08-19T16:51:07.009,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{400, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.009,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:07.011,ns_1@10.242.238.88:<0.16923.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[402]}, {checkpoints,[{402,1}]}, {name,<<"rebalance_402">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[402]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"402"}]} [rebalance:debug,2014-08-19T16:51:07.012,ns_1@10.242.238.88:<0.16923.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.16943.1> [rebalance:info,2014-08-19T16:51:07.013,ns_1@10.242.238.88:<0.16923.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:07.015,ns_1@10.242.238.88:<0.16923.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.015,ns_1@10.242.238.88:<0.16923.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:07.016,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 400 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:07.016,ns_1@10.242.238.88:<0.14368.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 402 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:07.017,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 400) [ns_server:debug,2014-08-19T16:51:07.017,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:07.017,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 398 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:07.017,ns_1@10.242.238.88:<0.16946.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 398 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:51:07.018,ns_1@10.242.238.88:<0.14376.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.024,ns_1@10.242.238.88:<0.16924.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_403 [ns_server:info,2014-08-19T16:51:07.024,ns_1@10.242.238.88:<0.14376.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_402_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.025,ns_1@10.242.238.88:<0.16924.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[403]}, {checkpoints,[{403,1}]}, {name,<<"rebalance_403">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[403]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"403"}]} [rebalance:debug,2014-08-19T16:51:07.026,ns_1@10.242.238.88:<0.16924.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.16950.1> [rebalance:info,2014-08-19T16:51:07.026,ns_1@10.242.238.88:<0.16924.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:07.028,ns_1@10.242.238.88:<0.16924.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.028,ns_1@10.242.238.88:<0.16924.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:07.029,ns_1@10.242.238.88:<0.14291.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 403 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:07.030,ns_1@10.242.238.88:<0.14299.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 59. Nacking mccouch update. [views:debug,2014-08-19T16:51:07.033,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/59. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",59,active,0} [ns_server:info,2014-08-19T16:51:07.033,ns_1@10.242.238.88:<0.14299.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_403_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:07.036,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:07.036,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{398, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.036,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,972,844,661, 533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477, 349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868, 685,63,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684, 556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164, 892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,551,496,368,185,913, 785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265, 
210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729, 601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209, 1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99, 776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774, 719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590,279, 224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717, 406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277,222, 822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715,404, 949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220,820, 765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402,947, 636,325,870,559,504,193,793,738,427,116] [ns_server:debug,2014-08-19T16:51:07.037,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.038,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.038,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:07.045,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 398 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:07.046,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 398) [ns_server:debug,2014-08-19T16:51:07.046,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:07.046,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 402 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:07.046,ns_1@10.242.238.88:<0.16962.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 402 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:07.064,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.065,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{402, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.065,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:07.066,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.067,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:07.074,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 402 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:07.074,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 402) [ns_server:debug,2014-08-19T16:51:07.075,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:07.075,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 403 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:07.075,ns_1@10.242.238.88:<0.16973.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 403 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:07.090,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.091,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.091,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{403, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.091,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:07.092,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:07.106,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 403 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:07.107,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 403) [ns_server:debug,2014-08-19T16:51:07.108,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:51:07.109,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/59. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.109,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",59,active,0} [ns_server:debug,2014-08-19T16:51:07.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 57. Nacking mccouch update. [views:debug,2014-08-19T16:51:07.193,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/57. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.193,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",57,active,0} [ns_server:debug,2014-08-19T16:51:07.195,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,972,844,661, 533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477, 349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868, 685,63,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684, 556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164, 892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,549,494,366,183,911,783,728, 600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831, 648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358,903, 592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97, 774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901,590, 279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772, 717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588,277, 222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715, 404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275,220, 820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713,402, 947,636,325,870,559,504,193,793,738,427,116] [views:debug,2014-08-19T16:51:07.226,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/57. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.226,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",57,active,0} [rebalance:info,2014-08-19T16:51:07.298,ns_1@10.242.238.88:<0.17016.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 405) [rebalance:info,2014-08-19T16:51:07.298,ns_1@10.242.238.88:<0.17017.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 404) [rebalance:info,2014-08-19T16:51:07.299,ns_1@10.242.238.88:<0.15657.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 640 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.299,ns_1@10.242.238.88:<0.15615.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 641 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:07.299,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 640 state to active [rebalance:info,2014-08-19T16:51:07.299,ns_1@10.242.238.88:<0.14136.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:07.300,ns_1@10.242.238.88:<0.14214.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:07.300,ns_1@10.242.238.88:<0.15657.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 640 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.300,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 641 state to active [ns_server:debug,2014-08-19T16:51:07.301,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 55. Nacking mccouch update. [views:debug,2014-08-19T16:51:07.301,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/55. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.302,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",55,active,0} [rebalance:info,2014-08-19T16:51:07.302,ns_1@10.242.238.88:<0.15615.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 641 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.302,ns_1@10.242.238.88:<0.15657.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.302,ns_1@10.242.238.88:<0.15615.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:07.304,ns_1@10.242.238.88:<0.14144.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_405_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:07.304,ns_1@10.242.238.88:<0.14136.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.304,ns_1@10.242.238.88:<0.14222.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_404_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:07.304,ns_1@10.242.238.88:<0.14214.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:07.304,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,972,844,661, 533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477, 349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868, 685,63,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684, 556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164, 892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 
914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959, 831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984, 673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358, 903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174, 97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901, 590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95, 772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588, 277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770, 715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275, 220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713, 402,947,636,325,870,559,504,193,793,738,427,116] [ns_server:debug,2014-08-19T16:51:07.307,ns_1@10.242.238.88:<0.14136.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 405 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17030.1> [ns_server:debug,2014-08-19T16:51:07.307,ns_1@10.242.238.88:<0.14214.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 404 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17031.1> [ns_server:info,2014-08-19T16:51:07.307,ns_1@10.242.238.88:<0.17030.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 405 to state replica [ns_server:info,2014-08-19T16:51:07.308,ns_1@10.242.238.88:<0.17031.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 404 to state replica [ns_server:debug,2014-08-19T16:51:07.333,ns_1@10.242.238.88:<0.17030.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_405 [rebalance:info,2014-08-19T16:51:07.335,ns_1@10.242.238.88:<0.17030.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[405]}, {checkpoints,[{405,1}]}, {name,<<"rebalance_405">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[405]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"405"}]} [rebalance:debug,2014-08-19T16:51:07.336,ns_1@10.242.238.88:<0.17030.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17032.1> [rebalance:info,2014-08-19T16:51:07.336,ns_1@10.242.238.88:<0.17030.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:51:07.338,ns_1@10.242.238.88:<0.17030.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.338,ns_1@10.242.238.88:<0.17030.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:07.339,ns_1@10.242.238.88:<0.14136.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 405 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:07.341,ns_1@10.242.238.88:<0.14144.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:07.344,ns_1@10.242.238.88:<0.14144.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_405_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.345,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 405 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:07.345,ns_1@10.242.238.88:<0.17036.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 405 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:07.347,ns_1@10.242.238.88:<0.17031.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_404 [rebalance:info,2014-08-19T16:51:07.348,ns_1@10.242.238.88:<0.17031.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[404]}, {checkpoints,[{404,1}]}, {name,<<"rebalance_404">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[404]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"404"}]} [rebalance:debug,2014-08-19T16:51:07.349,ns_1@10.242.238.88:<0.17031.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17037.1> [rebalance:info,2014-08-19T16:51:07.350,ns_1@10.242.238.88:<0.17031.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:07.351,ns_1@10.242.238.88:<0.17031.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.351,ns_1@10.242.238.88:<0.17031.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:07.352,ns_1@10.242.238.88:<0.14214.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 404 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:07.354,ns_1@10.242.238.88:<0.14222.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:07.357,ns_1@10.242.238.88:<0.14222.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_404_'ns_1@10.242.238.90'">>] [views:debug,2014-08-19T16:51:07.360,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/55. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.361,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",55,active,0} [ns_server:debug,2014-08-19T16:51:07.363,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.363,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:07.364,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.364,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{405, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.364,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:07.364,ns_1@10.242.238.88:<0.15545.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 642 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.364,ns_1@10.242.238.88:<0.15468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 643 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:07.365,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 642 state to active [rebalance:info,2014-08-19T16:51:07.366,ns_1@10.242.238.88:<0.15545.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 642 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.366,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 643 state to active [rebalance:info,2014-08-19T16:51:07.368,ns_1@10.242.238.88:<0.15468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 643 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.368,ns_1@10.242.238.88:<0.15545.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.368,ns_1@10.242.238.88:<0.15468.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.370,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 405 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:07.371,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 405) [ns_server:debug,2014-08-19T16:51:07.371,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:07.371,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 404 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:07.371,ns_1@10.242.238.88:<0.17058.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 404 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:07.387,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.388,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.388,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:07.388,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{404, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.388,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:07.394,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 404 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:07.395,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 404) [ns_server:debug,2014-08-19T16:51:07.395,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:51:07.460,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 53. Nacking mccouch update. [views:debug,2014-08-19T16:51:07.461,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/53. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",53,active,0} [ns_server:debug,2014-08-19T16:51:07.464,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,972,844,661, 533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477, 349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868, 685,63,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684, 556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164, 892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103, 959,831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128, 984,673,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855, 544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982, 671,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542, 
487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356, 901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172, 95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899, 588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93, 770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586, 275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768, 713,402,947,636,325,870,559,504,193,793,738,427,116] [views:debug,2014-08-19T16:51:07.494,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/53. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.494,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",53,active,0} [rebalance:info,2014-08-19T16:51:07.500,ns_1@10.242.238.88:<0.15328.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 645 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.500,ns_1@10.242.238.88:<0.15405.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 644 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:07.500,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 645 state to active [rebalance:info,2014-08-19T16:51:07.502,ns_1@10.242.238.88:<0.15328.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 645 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.502,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 644 state to active [rebalance:info,2014-08-19T16:51:07.503,ns_1@10.242.238.88:<0.15405.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 644 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.503,ns_1@10.242.238.88:<0.15328.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.504,ns_1@10.242.238.88:<0.15405.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:07.570,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 51. Nacking mccouch update. [views:debug,2014-08-19T16:51:07.570,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/51. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.570,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",51,active,0} [ns_server:debug,2014-08-19T16:51:07.572,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,972,844,661, 533,478,350,167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997, 869,686,558,503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477, 349,166,894,89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868, 685,63,557,502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476, 348,165,893,710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684, 556,501,373,190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164, 892,87,709,581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61, 555,500,372,189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163, 891,708,580,397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499, 371,188,916,788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85, 707,579,396,268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498, 370,187,915,787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706, 578,395,267,212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186, 914,786,731,603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577, 394,266,211,1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185, 913,785,730,602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393, 265,210,1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784, 729,601,418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264, 209,1014,937,809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783, 728,600,417,289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013, 936,808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416, 288,233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935, 807,752,624,441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415, 287,232,960,832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103, 959,831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128, 984,673,51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255, 855,544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126, 982,671,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853, 
542,487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980, 669,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540, 485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354, 899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170, 93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897, 586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91, 768,713,402,947,636,325,870,559,504,193,793,738,427,116] [rebalance:info,2014-08-19T16:51:07.583,ns_1@10.242.238.88:<0.15188.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 647 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.583,ns_1@10.242.238.88:<0.15251.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 646 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:07.584,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 647 state to active [rebalance:info,2014-08-19T16:51:07.585,ns_1@10.242.238.88:<0.15188.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 647 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.585,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 646 state to active [rebalance:info,2014-08-19T16:51:07.586,ns_1@10.242.238.88:<0.15251.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 646 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.586,ns_1@10.242.238.88:<0.15188.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.587,ns_1@10.242.238.88:<0.15251.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.667,ns_1@10.242.238.88:<0.15094.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 648 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.667,ns_1@10.242.238.88:<0.15017.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 649 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:07.667,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 648 state to active [rebalance:info,2014-08-19T16:51:07.668,ns_1@10.242.238.88:<0.15094.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 648 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.668,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 649 state to active [rebalance:info,2014-08-19T16:51:07.669,ns_1@10.242.238.88:<0.15017.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 649 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.670,ns_1@10.242.238.88:<0.15094.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.670,ns_1@10.242.238.88:<0.15017.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[views:debug,2014-08-19T16:51:07.703,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/51. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",51,active,0} [rebalance:info,2014-08-19T16:51:07.777,ns_1@10.242.238.88:<0.14935.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 650 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.777,ns_1@10.242.238.88:<0.14858.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 651 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:07.777,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 650 state to active [rebalance:info,2014-08-19T16:51:07.778,ns_1@10.242.238.88:<0.14935.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 650 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.778,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 651 state to active [rebalance:info,2014-08-19T16:51:07.780,ns_1@10.242.238.88:<0.14858.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 651 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.780,ns_1@10.242.238.88:<0.14935.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.780,ns_1@10.242.238.88:<0.14858.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:07.878,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 49. Nacking mccouch update. [views:debug,2014-08-19T16:51:07.878,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/49. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.879,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",49,active,0} [ns_server:debug,2014-08-19T16:51:07.880,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373, 190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372, 189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580, 397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267, 212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731, 603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211, 1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730, 602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015, 938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,358, 903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174, 97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356,901, 590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95, 772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899,588, 277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770, 715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586,275, 220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713, 402,947,636,325,870,559,504,193,793,738,427,116,972,661,350] [rebalance:info,2014-08-19T16:51:07.885,ns_1@10.242.238.88:<0.17142.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 653) [rebalance:info,2014-08-19T16:51:07.885,ns_1@10.242.238.88:<0.17143.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 659) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.17144.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 655) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.17145.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 660) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.17146.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 658) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.17147.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 657) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.17148.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 656) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.17149.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 652) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.14704.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.17150.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 654) [rebalance:info,2014-08-19T16:51:07.886,ns_1@10.242.238.88:<0.15524.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 896 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.887,ns_1@10.242.238.88:<0.17151.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 661) [ns_server:info,2014-08-19T16:51:07.887,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 896 state to active [rebalance:info,2014-08-19T16:51:07.887,ns_1@10.242.238.88:<0.15447.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 897 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.887,ns_1@10.242.238.88:<0.14270.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.887,ns_1@10.242.238.88:<0.14564.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.888,ns_1@10.242.238.88:<0.14193.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.888,ns_1@10.242.238.88:<0.14347.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.888,ns_1@10.242.238.88:<0.15524.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 896 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.888,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 897 state to active [rebalance:info,2014-08-19T16:51:07.888,ns_1@10.242.238.88:<0.14424.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.888,ns_1@10.242.238.88:<0.14495.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.888,ns_1@10.242.238.88:<0.14641.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.889,ns_1@10.242.238.88:<0.14781.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.889,ns_1@10.242.238.88:<0.14101.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:07.889,ns_1@10.242.238.88:<0.15447.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 897 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.890,ns_1@10.242.238.88:<0.15524.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:07.890,ns_1@10.242.238.88:<0.15447.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:07.892,ns_1@10.242.238.88:<0.14712.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_653_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.892,ns_1@10.242.238.88:<0.14704.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.893,ns_1@10.242.238.88:<0.14278.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_659_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.893,ns_1@10.242.238.88:<0.14270.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.894,ns_1@10.242.238.88:<0.14572.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_655_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.894,ns_1@10.242.238.88:<0.14564.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.895,ns_1@10.242.238.88:<0.14355.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_658_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.895,ns_1@10.242.238.88:<0.14347.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.895,ns_1@10.242.238.88:<0.14201.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_660_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.896,ns_1@10.242.238.88:<0.14193.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.896,ns_1@10.242.238.88:<0.14432.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_657_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.896,ns_1@10.242.238.88:<0.14424.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.896,ns_1@10.242.238.88:<0.14509.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_656_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.896,ns_1@10.242.238.88:<0.14495.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.897,ns_1@10.242.238.88:<0.14649.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_654_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.897,ns_1@10.242.238.88:<0.14641.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:51:07.897,ns_1@10.242.238.88:<0.14109.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_661_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.897,ns_1@10.242.238.88:<0.14101.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:07.898,ns_1@10.242.238.88:<0.14789.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_652_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:07.899,ns_1@10.242.238.88:<0.14781.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:07.902,ns_1@10.242.238.88:<0.14704.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 653 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17180.1> [ns_server:info,2014-08-19T16:51:07.902,ns_1@10.242.238.88:<0.17180.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 653 to state replica [ns_server:debug,2014-08-19T16:51:07.906,ns_1@10.242.238.88:<0.14270.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 659 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17181.1> [ns_server:info,2014-08-19T16:51:07.907,ns_1@10.242.238.88:<0.17181.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 659 to state replica [ns_server:debug,2014-08-19T16:51:07.908,ns_1@10.242.238.88:<0.14564.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 655 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17182.1> [ns_server:debug,2014-08-19T16:51:07.908,ns_1@10.242.238.88:<0.14347.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 658 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17183.1> [ns_server:debug,2014-08-19T16:51:07.908,ns_1@10.242.238.88:<0.14193.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 660 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17184.1> [ns_server:debug,2014-08-19T16:51:07.908,ns_1@10.242.238.88:<0.14424.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 657 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17185.1> [ns_server:debug,2014-08-19T16:51:07.908,ns_1@10.242.238.88:<0.14495.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 656 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17186.1> [ns_server:debug,2014-08-19T16:51:07.908,ns_1@10.242.238.88:<0.14641.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 654 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17187.1> [ns_server:debug,2014-08-19T16:51:07.908,ns_1@10.242.238.88:<0.14101.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 661 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17188.1> [ns_server:info,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.17185.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 657 to state replica [ns_server:info,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.17186.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 656 to state replica [ns_server:info,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.17182.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 655 to state replica 
[ns_server:info,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.17183.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 658 to state replica [ns_server:debug,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.14781.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 652 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17189.1> [ns_server:info,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.17184.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 660 to state replica [ns_server:info,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.17187.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 654 to state replica [ns_server:info,2014-08-19T16:51:07.909,ns_1@10.242.238.88:<0.17188.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 661 to state replica [ns_server:info,2014-08-19T16:51:07.910,ns_1@10.242.238.88:<0.17189.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 652 to state replica [ns_server:debug,2014-08-19T16:51:07.926,ns_1@10.242.238.88:<0.17180.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_653 [rebalance:info,2014-08-19T16:51:07.927,ns_1@10.242.238.88:<0.17180.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[653]}, {checkpoints,[{653,1}]}, {name,<<"rebalance_653">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[653]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"653"}]} [rebalance:debug,2014-08-19T16:51:07.928,ns_1@10.242.238.88:<0.17180.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17190.1> [rebalance:info,2014-08-19T16:51:07.930,ns_1@10.242.238.88:<0.17180.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:07.932,ns_1@10.242.238.88:<0.17180.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.932,ns_1@10.242.238.88:<0.17180.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:07.933,ns_1@10.242.238.88:<0.14704.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 653 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:07.935,ns_1@10.242.238.88:<0.14712.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:07.938,ns_1@10.242.238.88:<0.14712.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_653_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:07.939,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 653 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:07.939,ns_1@10.242.238.88:<0.17194.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 653 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:07.940,ns_1@10.242.238.88:<0.17181.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_659 [rebalance:info,2014-08-19T16:51:07.941,ns_1@10.242.238.88:<0.17181.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[659]}, {checkpoints,[{659,1}]}, {name,<<"rebalance_659">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, 
{vbuckets,[659]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"659"}]} [rebalance:debug,2014-08-19T16:51:07.942,ns_1@10.242.238.88:<0.17181.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17195.1> [rebalance:info,2014-08-19T16:51:07.943,ns_1@10.242.238.88:<0.17181.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:07.945,ns_1@10.242.238.88:<0.17181.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.945,ns_1@10.242.238.88:<0.17181.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:07.946,ns_1@10.242.238.88:<0.14270.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 659 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:07.947,ns_1@10.242.238.88:<0.14278.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.951,ns_1@10.242.238.88:<0.17182.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_655 [rebalance:info,2014-08-19T16:51:07.953,ns_1@10.242.238.88:<0.17182.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[655]}, {checkpoints,[{655,1}]}, {name,<<"rebalance_655">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[655]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"655"}]} [ns_server:debug,2014-08-19T16:51:07.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_384_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_384_'ns_1@10.242.238.90'">>}]}, {move_state,640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_640_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_640_'ns_1@10.242.238.89'">>}]}, {move_state,385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_385_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_385_'ns_1@10.242.238.90'">>}]}, {move_state,641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_641_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_641_'ns_1@10.242.238.89'">>}]}, {move_state,386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_386_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_386_'ns_1@10.242.238.90'">>}]}, {move_state,642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_642_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_642_'ns_1@10.242.238.89'">>}]}, {move_state,896, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_896_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_896_'ns_1@10.242.238.89'">>}]}, {move_state,387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_387_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_387_'ns_1@10.242.238.90'">>}]}, {move_state,643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_643_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_643_'ns_1@10.242.238.89'">>}]}, {move_state,897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_897_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_897_'ns_1@10.242.238.89'">>}]}, {move_state,388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_388_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_388_'ns_1@10.242.238.90'">>}]}, {move_state,644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_644_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_644_'ns_1@10.242.238.89'">>}]}, {move_state,898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_898_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_898_'ns_1@10.242.238.89'">>}]}, {move_state,389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_389_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_389_'ns_1@10.242.238.90'">>}]}, {move_state,645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_645_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_645_'ns_1@10.242.238.89'">>}]}, {move_state,899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_899_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_899_'ns_1@10.242.238.89'">>}]}, {move_state,390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_390_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_390_'ns_1@10.242.238.90'">>}]}, {move_state,646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_646_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_646_'ns_1@10.242.238.89'">>}]}, {move_state,900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_900_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_900_'ns_1@10.242.238.89'">>}]}, {move_state,391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_391_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_391_'ns_1@10.242.238.90'">>}]}, {move_state,647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_647_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_647_'ns_1@10.242.238.89'">>}]}, {move_state,901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_901_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_901_'ns_1@10.242.238.89'">>}]}, {move_state,392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_392_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_392_'ns_1@10.242.238.90'">>}]}, {move_state,648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_648_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_648_'ns_1@10.242.238.89'">>}]}, {move_state,902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_902_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_902_'ns_1@10.242.238.89'">>}]}, {move_state,393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_393_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_393_'ns_1@10.242.238.90'">>}]}, {move_state,649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_649_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_649_'ns_1@10.242.238.89'">>}]}, {move_state,903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_903_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_903_'ns_1@10.242.238.89'">>}]}, {move_state,394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_394_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_394_'ns_1@10.242.238.90'">>}]}, {move_state,650, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_650_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_650_'ns_1@10.242.238.89'">>}]}, {move_state,904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_904_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_904_'ns_1@10.242.238.89'">>}]}, {move_state,395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_395_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_395_'ns_1@10.242.238.90'">>}]}, {move_state,651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_651_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_651_'ns_1@10.242.238.89'">>}]}, {move_state,905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_905_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_905_'ns_1@10.242.238.89'">>}]}, {move_state,652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_652_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_652_'ns_1@10.242.238.89'">>}]}, {move_state,906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_906_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_906_'ns_1@10.242.238.89'">>}]}, {move_state,397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_397_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_397_'ns_1@10.242.238.90'">>}]}, {move_state,653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_653_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_653_'ns_1@10.242.238.89'">>}]}, {move_state,907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_907_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_907_'ns_1@10.242.238.89'">>}]}, {move_state,654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_654_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_654_'ns_1@10.242.238.89'">>}]}, {move_state,908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, 
<<"replication_building_908_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_908_'ns_1@10.242.238.89'">>}]}, {move_state,655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_655_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_655_'ns_1@10.242.238.89'">>}]}, {move_state,909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_909_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_909_'ns_1@10.242.238.89'">>}]}, {move_state,656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_656_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_656_'ns_1@10.242.238.89'">>}]}, {move_state,910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_910_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_910_'ns_1@10.242.238.89'">>}]}, {move_state,657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_657_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_657_'ns_1@10.242.238.89'">>}]}, {move_state,911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_911_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_911_'ns_1@10.242.238.89'">>}]}, {move_state,658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_658_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_658_'ns_1@10.242.238.89'">>}]}, {move_state,912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_912_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_912_'ns_1@10.242.238.89'">>}]}, {move_state,659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_659_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_659_'ns_1@10.242.238.89'">>}]}, {move_state,913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_913_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_913_'ns_1@10.242.238.89'">>}]}, {move_state,660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_660_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_660_'ns_1@10.242.238.89'">>}]}, {move_state,914, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_914_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_914_'ns_1@10.242.238.89'">>}]}, {move_state,661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_661_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_661_'ns_1@10.242.238.89'">>}]}, {move_state,915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_915_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_915_'ns_1@10.242.238.89'">>}]}] [rebalance:debug,2014-08-19T16:51:07.954,ns_1@10.242.238.88:<0.17182.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17197.1> [ns_server:debug,2014-08-19T16:51:07.954,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 384, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 640, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:07.955,ns_1@10.242.238.88:<0.17182.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:07.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 385, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:info,2014-08-19T16:51:07.956,ns_1@10.242.238.88:<0.14278.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_659_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:07.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 641, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:debug,2014-08-19T16:51:07.957,ns_1@10.242.238.88:<0.17182.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.957,ns_1@10.242.238.88:<0.17182.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:07.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 386, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 642, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:07.959,ns_1@10.242.238.88:<0.14564.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 655 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:07.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 896, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 387, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 643, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[rebalance:debug,2014-08-19T16:51:07.960,ns_1@10.242.238.88:<0.14572.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 897, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.962,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 388, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.962,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:07.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 644, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 898, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 389, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 645, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.965,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:07.963,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/49. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:07.965,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",49,active,0} [ns_server:debug,2014-08-19T16:51:07.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 899, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.965,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{653, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:07.966,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:07.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 390, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 646, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:info,2014-08-19T16:51:07.969,ns_1@10.242.238.88:<0.14572.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_655_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:07.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 900, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.970,ns_1@10.242.238.88:<0.17184.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_660 [ns_server:debug,2014-08-19T16:51:07.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 391, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 647, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 901, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:07.971,ns_1@10.242.238.88:<0.17184.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[660]}, {checkpoints,[{660,1}]}, {name,<<"rebalance_660">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[660]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"660"}]} [ns_server:debug,2014-08-19T16:51:07.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 392, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [rebalance:debug,2014-08-19T16:51:07.972,ns_1@10.242.238.88:<0.17184.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17233.1> [ns_server:debug,2014-08-19T16:51:07.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 648, 
[{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:07.972,ns_1@10.242.238.88:<0.17184.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:07.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 902, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:07.973,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 653 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:51:07.974,ns_1@10.242.238.88:<0.17184.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.974,ns_1@10.242.238.88:<0.17184.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:07.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 393, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 653) [ns_server:debug,2014-08-19T16:51:07.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 649, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.976,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:07.976,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 659 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [ns_server:debug,2014-08-19T16:51:07.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 903, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:07.976,ns_1@10.242.238.88:<0.17241.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 659 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:07.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 394, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 650, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:07.977,ns_1@10.242.238.88:<0.14193.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 660 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:07.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 904, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:07.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 395, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:07.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 651, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:debug,2014-08-19T16:51:07.979,ns_1@10.242.238.88:<0.14201.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:07.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 905, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:51:07.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 652, [{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 906, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 397, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}]
[ns_server:info,2014-08-19T16:51:07.982,ns_1@10.242.238.88:<0.14201.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_660_'ns_1@10.242.238.89'">>]
[ns_server:debug,2014-08-19T16:51:07.982,ns_1@10.242.238.88:<0.17189.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_652
[ns_server:debug,2014-08-19T16:51:07.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 907, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 654, [{'ns_1@10.242.238.89',0}]
[rebalance:info,2014-08-19T16:51:07.984,ns_1@10.242.238.88:<0.17189.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[652]}, {checkpoints,[{652,1}]}, {name,<<"rebalance_652">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[652]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"652"}]}
[ns_server:debug,2014-08-19T16:51:07.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 908, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[rebalance:debug,2014-08-19T16:51:07.985,ns_1@10.242.238.88:<0.17189.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17257.1>
[rebalance:info,2014-08-19T16:51:07.985,ns_1@10.242.238.88:<0.17189.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill
[ns_server:debug,2014-08-19T16:51:07.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 909, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 656, [{'ns_1@10.242.238.89',0}]
[rebalance:debug,2014-08-19T16:51:07.987,ns_1@10.242.238.88:<0.17189.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal
[rebalance:info,2014-08-19T16:51:07.987,ns_1@10.242.238.88:<0.17189.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover
[ns_server:debug,2014-08-19T16:51:07.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 910, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[rebalance:info,2014-08-19T16:51:07.988,ns_1@10.242.238.88:<0.14781.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 652 state change: {'ns_1@10.242.238.90',active,undefined, undefined}
[ns_server:debug,2014-08-19T16:51:07.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 657, [{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 911, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 658, [{'ns_1@10.242.238.89',0}]
[rebalance:debug,2014-08-19T16:51:07.989,ns_1@10.242.238.88:<0.14789.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[ns_server:debug,2014-08-19T16:51:07.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 912, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 913, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 914, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.992,ns_1@10.242.238.88:<0.17188.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_661
[ns_server:debug,2014-08-19T16:51:07.992,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 661, [{'ns_1@10.242.238.89',0}]
[ns_server:info,2014-08-19T16:51:07.992,ns_1@10.242.238.88:<0.14789.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_652_'ns_1@10.242.238.89'">>]
[ns_server:debug,2014-08-19T16:51:07.993,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 915, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}]
[ns_server:debug,2014-08-19T16:51:07.993,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[rebalance:info,2014-08-19T16:51:07.993,ns_1@10.242.238.88:<0.17188.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[661]}, {checkpoints,[{661,1}]}, {name,<<"rebalance_661">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[661]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"661"}]}
[rebalance:debug,2014-08-19T16:51:07.994,ns_1@10.242.238.88:<0.17188.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17276.1>
[ns_server:debug,2014-08-19T16:51:07.994,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:debug,2014-08-19T16:51:07.994,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:07.994,ns_1@10.242.238.88:<0.15384.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 898 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:51:07.994,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{659, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:07.994,ns_1@10.242.238.88:<0.15307.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 899 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:07.995,ns_1@10.242.238.88:<0.17188.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:07.995,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 898 state to active [ns_server:debug,2014-08-19T16:51:07.995,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:07.997,ns_1@10.242.238.88:<0.17188.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:07.997,ns_1@10.242.238.88:<0.17188.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:07.997,ns_1@10.242.238.88:<0.15384.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 898 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:07.997,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 899 state to active [rebalance:info,2014-08-19T16:51:07.997,ns_1@10.242.238.88:<0.14101.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 661 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:07.999,ns_1@10.242.238.88:<0.15307.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 899 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:07.999,ns_1@10.242.238.88:<0.15384.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:debug,2014-08-19T16:51:07.999,ns_1@10.242.238.88:<0.14109.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.000,ns_1@10.242.238.88:<0.15307.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:08.004,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 659 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.005,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 659) [ns_server:info,2014-08-19T16:51:08.005,ns_1@10.242.238.88:<0.14109.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_661_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.006,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.006,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 655 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.006,ns_1@10.242.238.88:<0.17294.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 655 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:08.008,ns_1@10.242.238.88:<0.17187.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_654 [rebalance:info,2014-08-19T16:51:08.009,ns_1@10.242.238.88:<0.17187.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[654]}, {checkpoints,[{654,1}]}, {name,<<"rebalance_654">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[654]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"654"}]} [rebalance:debug,2014-08-19T16:51:08.010,ns_1@10.242.238.88:<0.17187.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17295.1> [rebalance:info,2014-08-19T16:51:08.010,ns_1@10.242.238.88:<0.17187.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.012,ns_1@10.242.238.88:<0.17187.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.012,ns_1@10.242.238.88:<0.17187.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.013,ns_1@10.242.238.88:<0.14641.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 654 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.014,ns_1@10.242.238.88:<0.14649.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.017,ns_1@10.242.238.88:<0.17183.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_658 [ns_server:info,2014-08-19T16:51:08.021,ns_1@10.242.238.88:<0.14649.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_654_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.023,ns_1@10.242.238.88:<0.17183.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[658]}, {checkpoints,[{658,1}]}, {name,<<"rebalance_658">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[658]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"658"}]} [rebalance:debug,2014-08-19T16:51:08.024,ns_1@10.242.238.88:<0.17183.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17298.1> [rebalance:info,2014-08-19T16:51:08.025,ns_1@10.242.238.88:<0.17183.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[ns_server:debug,2014-08-19T16:51:08.025,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.026,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.026,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{655, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.026,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.027,ns_1@10.242.238.88:<0.17183.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:51:08.027,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.027,ns_1@10.242.238.88:<0.17183.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.028,ns_1@10.242.238.88:<0.14347.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 658 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.030,ns_1@10.242.238.88:<0.14355.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.033,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 655 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.034,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 655) [ns_server:debug,2014-08-19T16:51:08.034,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.035,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 660 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.035,ns_1@10.242.238.88:<0.17317.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 660 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:51:08.037,ns_1@10.242.238.88:<0.14355.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_658_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.039,ns_1@10.242.238.88:<0.17185.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_657 [rebalance:info,2014-08-19T16:51:08.040,ns_1@10.242.238.88:<0.17185.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[657]}, {checkpoints,[{657,1}]}, {name,<<"rebalance_657">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[657]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"657"}]} [rebalance:debug,2014-08-19T16:51:08.041,ns_1@10.242.238.88:<0.17185.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17326.1> [rebalance:info,2014-08-19T16:51:08.041,ns_1@10.242.238.88:<0.17185.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.043,ns_1@10.242.238.88:<0.17185.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.043,ns_1@10.242.238.88:<0.17185.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.044,ns_1@10.242.238.88:<0.14424.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 657 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.044,ns_1@10.242.238.88:<0.17186.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_656 [rebalance:info,2014-08-19T16:51:08.045,ns_1@10.242.238.88:<0.17186.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[656]}, {checkpoints,[{656,1}]}, {name,<<"rebalance_656">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[656]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"656"}]} [rebalance:debug,2014-08-19T16:51:08.045,ns_1@10.242.238.88:<0.14432.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:08.046,ns_1@10.242.238.88:<0.17186.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17327.1> [rebalance:info,2014-08-19T16:51:08.047,ns_1@10.242.238.88:<0.17186.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.048,ns_1@10.242.238.88:<0.17186.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.048,ns_1@10.242.238.88:<0.17186.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[ns_server:info,2014-08-19T16:51:08.049,ns_1@10.242.238.88:<0.14432.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_657_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.049,ns_1@10.242.238.88:<0.14495.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 656 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.051,ns_1@10.242.238.88:<0.14509.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.052,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.053,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.053,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{660, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.054,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.054,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:08.055,ns_1@10.242.238.88:<0.14509.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_656_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.066,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 660 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.067,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 660) [ns_server:debug,2014-08-19T16:51:08.067,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.067,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 652 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.068,ns_1@10.242.238.88:<0.17342.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 652 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:08.084,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.085,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:08.085,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.085,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.085,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{652, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:08.092,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 652 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.093,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 652) [ns_server:debug,2014-08-19T16:51:08.093,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.093,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 661 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.094,ns_1@10.242.238.88:<0.17353.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 661 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:08.111,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.111,ns_1@10.242.238.88:<0.15150.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 901 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:08.111,ns_1@10.242.238.88:<0.15230.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 900 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:51:08.112,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{661, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.112,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:08.112,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 901 state to active [ns_server:debug,2014-08-19T16:51:08.113,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:08.113,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:51:08.114,ns_1@10.242.238.88:<0.15150.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 901 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:08.115,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 900 state to active [rebalance:info,2014-08-19T16:51:08.116,ns_1@10.242.238.88:<0.15230.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 900 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:08.117,ns_1@10.242.238.88:<0.15150.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:08.117,ns_1@10.242.238.88:<0.15230.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:08.119,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 661 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.120,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 661) [ns_server:debug,2014-08-19T16:51:08.120,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.120,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 654 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.121,ns_1@10.242.238.88:<0.17371.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 654 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:08.129,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 47. Nacking mccouch update. [views:debug,2014-08-19T16:51:08.129,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/47. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.129,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",47,active,0} [ns_server:debug,2014-08-19T16:51:08.131,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373, 190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372, 189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580, 397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267, 212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731, 603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211, 1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730, 602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015, 938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,356, 901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172, 95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354,899, 588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93, 770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897,586, 275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768, 713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,350] [ns_server:debug,2014-08-19T16:51:08.138,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.139,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.140,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{654, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.140,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.140,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.149,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 654 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.150,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 654) [ns_server:debug,2014-08-19T16:51:08.151,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.151,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 658 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.151,ns_1@10.242.238.88:<0.17382.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 658 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:08.169,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.170,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.170,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:08.170,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{658, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.170,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.180,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 658 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.180,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 658) [ns_server:debug,2014-08-19T16:51:08.181,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.181,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 657 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.181,ns_1@10.242.238.88:<0.17393.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 657 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:08.199,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.200,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.200,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.200,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{657, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.200,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:08.204,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/47. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.204,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",47,active,0} [rebalance:info,2014-08-19T16:51:08.208,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 657 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.209,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 657) [ns_server:debug,2014-08-19T16:51:08.209,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.210,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 656 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:08.210,ns_1@10.242.238.88:<0.17404.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 656 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:51:08.233,ns_1@10.242.238.88:<0.15073.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 902 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:08.233,ns_1@10.242.238.88:<0.14991.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 903 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:08.233,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 902 state to active [ns_server:debug,2014-08-19T16:51:08.235,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.235,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.235,ns_1@10.242.238.88:<0.15073.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 902 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:51:08.235,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:08.235,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 903 state to active [ns_server:debug,2014-08-19T16:51:08.236,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.236,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{656, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:08.238,ns_1@10.242.238.88:<0.14991.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 903 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:08.238,ns_1@10.242.238.88:<0.15073.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:08.238,ns_1@10.242.238.88:<0.14991.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[rebalance:info,2014-08-19T16:51:08.248,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 656 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.248,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 656) [ns_server:debug,2014-08-19T16:51:08.249,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.346,ns_1@10.242.238.88:<0.14823.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 905 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:08.346,ns_1@10.242.238.88:<0.14900.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 904 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:08.346,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 905 state to active [rebalance:info,2014-08-19T16:51:08.348,ns_1@10.242.238.88:<0.14823.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 905 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:08.348,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 904 state to active [rebalance:info,2014-08-19T16:51:08.349,ns_1@10.242.238.88:<0.14900.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 904 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:08.350,ns_1@10.242.238.88:<0.14823.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:08.350,ns_1@10.242.238.88:<0.14900.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 45. Nacking mccouch update. [views:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/45. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",45,active,0} [ns_server:debug,2014-08-19T16:51:08.391,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373, 190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372, 189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580, 397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267, 212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731, 603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211, 1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730, 602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015, 938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,354, 899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170, 93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352,897, 586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91, 768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,350] [views:debug,2014-08-19T16:51:08.438,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/45. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.438,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",45,active,0} [rebalance:info,2014-08-19T16:51:08.463,ns_1@10.242.238.88:<0.17446.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 906) [rebalance:info,2014-08-19T16:51:08.463,ns_1@10.242.238.88:<0.17447.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 907) [rebalance:info,2014-08-19T16:51:08.464,ns_1@10.242.238.88:<0.14760.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.464,ns_1@10.242.238.88:<0.17448.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 908) [rebalance:info,2014-08-19T16:51:08.464,ns_1@10.242.238.88:<0.17449.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 910) [rebalance:info,2014-08-19T16:51:08.464,ns_1@10.242.238.88:<0.14683.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.465,ns_1@10.242.238.88:<0.17450.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 912) [rebalance:info,2014-08-19T16:51:08.465,ns_1@10.242.238.88:<0.17451.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 911) [rebalance:info,2014-08-19T16:51:08.465,ns_1@10.242.238.88:<0.14620.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.465,ns_1@10.242.238.88:<0.17452.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 914) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17453.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 386) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.14466.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17455.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 913) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17454.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 909) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.14312.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17456.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 915) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17458.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 387) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17457.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 384) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17459.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 385) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.14395.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17460.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 389) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17461.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 391) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17462.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 390) [rebalance:info,2014-08-19T16:51:08.466,ns_1@10.242.238.88:<0.17463.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 388) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17464.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 393) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17465.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 395) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.14157.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17466.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 394) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17467.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 642) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17468.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 644) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17470.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 640) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17469.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 646) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.14243.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17471.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 392) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.14080.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17473.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 641) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17472.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 643) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.17474.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 645) [rebalance:info,2014-08-19T16:51:08.467,ns_1@10.242.238.88:<0.14543.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17475.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 896) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17478.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 397) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17477.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 649) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17479.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 647) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17480.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 650) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17481.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 648) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17482.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 897) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17483.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 900) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.15545.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17484.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 651) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17486.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 898) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17487.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 901) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.15405.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17488.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 899) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.15657.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.14768.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_906_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17491.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 904) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17490.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 902) [rebalance:info,2014-08-19T16:51:08.468,ns_1@10.242.238.88:<0.17492.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 905) [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15251.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15524.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.14691.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_907_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.14760.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.17494.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 903) [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15468.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.14683.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15615.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15052.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15447.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15583.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15230.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15017.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15679.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.15328.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.469,ns_1@10.242.238.88:<0.14739.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.15503.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.15384.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.15188.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.15129.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.15150.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.15636.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.14935.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.14823.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.470,ns_1@10.242.238.88:<0.15094.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.14858.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.14879.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.14900.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.15073.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.15307.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.15349.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.15209.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.14991.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:51:08.471,ns_1@10.242.238.88:<0.14628.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_908_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.472,ns_1@10.242.238.88:<0.14620.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:08.472,ns_1@10.242.238.88:<0.15286.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.472,ns_1@10.242.238.88:<0.15426.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:08.472,ns_1@10.242.238.88:<0.14956.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:51:08.474,ns_1@10.242.238.88:<0.14474.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_910_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.474,ns_1@10.242.238.88:<0.14466.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.474,ns_1@10.242.238.88:<0.14405.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_911_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.475,ns_1@10.242.238.88:<0.14395.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.475,ns_1@10.242.238.88:<0.14325.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_912_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.475,ns_1@10.242.238.88:<0.14312.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.479,ns_1@10.242.238.88:<0.14165.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_914_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.479,ns_1@10.242.238.88:<0.14157.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.483,ns_1@10.242.238.88:<0.14088.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_915_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.484,ns_1@10.242.238.88:<0.14080.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.485,ns_1@10.242.238.88:<0.14251.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_913_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.486,ns_1@10.242.238.88:<0.14243.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.490,ns_1@10.242.238.88:<0.14551.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following 
tap names on 'ns_1@10.242.238.88': [<<"replication_building_909_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.490,ns_1@10.242.238.88:<0.14543.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.492,ns_1@10.242.238.88:<0.15553.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_642_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.492,ns_1@10.242.238.88:<0.15545.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.493,ns_1@10.242.238.88:<0.15532.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_896_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:51:08.493,ns_1@10.242.238.88:<0.15259.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_646_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.493,ns_1@10.242.238.88:<0.15524.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:08.493,ns_1@10.242.238.88:<0.15251.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.494,ns_1@10.242.238.88:<0.15665.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_640_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:08.494,ns_1@10.242.238.88:<0.15413.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_644_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.494,ns_1@10.242.238.88:<0.15657.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:08.494,ns_1@10.242.238.88:<0.15405.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.495,ns_1@10.242.238.88:<0.15480.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_643_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.495,ns_1@10.242.238.88:<0.15468.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.495,ns_1@10.242.238.88:<0.15455.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_897_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.495,ns_1@10.242.238.88:<0.15447.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.495,ns_1@10.242.238.88:<0.15238.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_900_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.496,ns_1@10.242.238.88:<0.15230.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.496,ns_1@10.242.238.88:<0.15623.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_641_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.496,ns_1@10.242.238.88:<0.15615.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:51:08.496,ns_1@10.242.238.88:<0.15025.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_649_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:08.497,ns_1@10.242.238.88:<0.15060.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_393_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.497,ns_1@10.242.238.88:<0.15017.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:08.497,ns_1@10.242.238.88:<0.15052.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.497,ns_1@10.242.238.88:<0.15336.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_645_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.497,ns_1@10.242.238.88:<0.15328.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.497,ns_1@10.242.238.88:<0.15593.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_386_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.498,ns_1@10.242.238.88:<0.15583.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.498,ns_1@10.242.238.88:<0.15196.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_647_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.498,ns_1@10.242.238.88:<0.15188.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.499,ns_1@10.242.238.88:<0.15687.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_384_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.499,ns_1@10.242.238.88:<0.15679.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.499,ns_1@10.242.238.88:<0.15392.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_898_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.499,ns_1@10.242.238.88:<0.15384.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.499,ns_1@10.242.238.88:<0.15158.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_901_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.499,ns_1@10.242.238.88:<0.15150.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.500,ns_1@10.242.238.88:<0.14747.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_397_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.500,ns_1@10.242.238.88:<0.14739.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.500,ns_1@10.242.238.88:<0.15511.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_387_'ns_1@10.242.238.89'">>] 
[rebalance:info,2014-08-19T16:51:08.500,ns_1@10.242.238.88:<0.15503.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.501,ns_1@10.242.238.88:<0.14943.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_650_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.501,ns_1@10.242.238.88:<0.14935.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.501,ns_1@10.242.238.88:<0.14831.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_905_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.501,ns_1@10.242.238.88:<0.14823.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.501,ns_1@10.242.238.88:<0.15137.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_392_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.502,ns_1@10.242.238.88:<0.15129.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.502,ns_1@10.242.238.88:<0.15102.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_648_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.502,ns_1@10.242.238.88:<0.15094.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.502,ns_1@10.242.238.88:<0.14866.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_651_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.502,ns_1@10.242.238.88:<0.14858.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.503,ns_1@10.242.238.88:<0.15081.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_902_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.503,ns_1@10.242.238.88:<0.15073.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.504,ns_1@10.242.238.88:<0.14908.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_904_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.504,ns_1@10.242.238.88:<0.14900.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.504,ns_1@10.242.238.88:<0.15644.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_385_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.504,ns_1@10.242.238.88:<0.15636.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.504,ns_1@10.242.238.88:<0.15315.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_899_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.504,ns_1@10.242.238.88:<0.15307.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:51:08.505,ns_1@10.242.238.88:<0.14999.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_903_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:08.506,ns_1@10.242.238.88:<0.14991.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.506,ns_1@10.242.238.88:<0.14887.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_395_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.506,ns_1@10.242.238.88:<0.14879.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.506,ns_1@10.242.238.88:<0.15357.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_389_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.506,ns_1@10.242.238.88:<0.15349.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.506,ns_1@10.242.238.88:<0.15217.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_391_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.507,ns_1@10.242.238.88:<0.15209.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.507,ns_1@10.242.238.88:<0.15294.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_390_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.507,ns_1@10.242.238.88:<0.15286.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:08.507,ns_1@10.242.238.88:<0.14760.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 906 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17511.1> [ns_server:info,2014-08-19T16:51:08.507,ns_1@10.242.238.88:<0.15434.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_388_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.508,ns_1@10.242.238.88:<0.14683.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 907 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17512.1> [rebalance:info,2014-08-19T16:51:08.508,ns_1@10.242.238.88:<0.15426.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.508,ns_1@10.242.238.88:<0.14964.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_394_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:51:08.508,ns_1@10.242.238.88:<0.17511.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 906 to state replica [rebalance:info,2014-08-19T16:51:08.508,ns_1@10.242.238.88:<0.14956.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:08.508,ns_1@10.242.238.88:<0.17512.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 907 to state replica [ns_server:debug,2014-08-19T16:51:08.519,ns_1@10.242.238.88:<0.14620.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 908 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17583.1> 
[ns_server:info,2014-08-19T16:51:08.520,ns_1@10.242.238.88:<0.17583.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 908 to state replica [ns_server:debug,2014-08-19T16:51:08.523,ns_1@10.242.238.88:<0.14466.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 910 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17598.1> [ns_server:debug,2014-08-19T16:51:08.525,ns_1@10.242.238.88:<0.14395.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 911 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17599.1> [ns_server:info,2014-08-19T16:51:08.525,ns_1@10.242.238.88:<0.17598.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 910 to state replica [ns_server:debug,2014-08-19T16:51:08.525,ns_1@10.242.238.88:<0.14312.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 912 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17600.1> [ns_server:info,2014-08-19T16:51:08.527,ns_1@10.242.238.88:<0.17599.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 911 to state replica [ns_server:info,2014-08-19T16:51:08.527,ns_1@10.242.238.88:<0.17600.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 912 to state replica [ns_server:debug,2014-08-19T16:51:08.531,ns_1@10.242.238.88:<0.14157.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 914 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17601.1> [ns_server:info,2014-08-19T16:51:08.533,ns_1@10.242.238.88:<0.17601.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 914 to state replica [ns_server:debug,2014-08-19T16:51:08.533,ns_1@10.242.238.88:<0.17511.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_906 [rebalance:info,2014-08-19T16:51:08.538,ns_1@10.242.238.88:<0.17511.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[906]}, {checkpoints,[{906,1}]}, {name,<<"rebalance_906">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[906]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"906"}]} [rebalance:debug,2014-08-19T16:51:08.539,ns_1@10.242.238.88:<0.17511.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17626.1> [ns_server:debug,2014-08-19T16:51:08.540,ns_1@10.242.238.88:<0.14080.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 915 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17602.1> [ns_server:debug,2014-08-19T16:51:08.542,ns_1@10.242.238.88:<0.14243.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 913 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17603.1> [ns_server:info,2014-08-19T16:51:08.543,ns_1@10.242.238.88:<0.17603.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 913 to state replica [ns_server:info,2014-08-19T16:51:08.543,ns_1@10.242.238.88:<0.17602.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 915 to state replica [ns_server:debug,2014-08-19T16:51:08.544,ns_1@10.242.238.88:<0.14543.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 909 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17604.1> [rebalance:info,2014-08-19T16:51:08.547,ns_1@10.242.238.88:<0.17511.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[ns_server:debug,2014-08-19T16:51:08.547,ns_1@10.242.238.88:<0.15545.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 642 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17605.1> [ns_server:debug,2014-08-19T16:51:08.547,ns_1@10.242.238.88:<0.15251.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 646 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17606.1> [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15524.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 896 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17607.1> [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15657.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 640 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17608.1> [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15447.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 897 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17611.1> [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15405.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 644 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17609.1> [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15615.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 641 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17613.1> [ns_server:info,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.17604.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 909 to state replica [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15017.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 649 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17614.1> [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15583.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 386 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17616.1> [ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15328.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 645 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17617.1> [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 43. Nacking mccouch update. 
[ns_server:debug,2014-08-19T16:51:08.548,ns_1@10.242.238.88:<0.15052.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 393 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17615.1> [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15230.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 900 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17612.1> [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15188.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 647 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17618.1> [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15468.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 643 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17610.1> [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15384.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 898 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17619.1> [views:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/43. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15679.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 384 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17621.1> [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",43,active,0} [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15150.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 901 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17620.1> [ns_server:info,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.17608.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 640 to state replica [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.14935.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 650 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17622.1> [ns_server:info,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.17611.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 897 to state replica [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15503.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 387 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17623.1> [ns_server:info,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.17606.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 646 to state replica [ns_server:debug,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.15129.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 392 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17625.1> [ns_server:info,2014-08-19T16:51:08.549,ns_1@10.242.238.88:<0.17612.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 900 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17605.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 642 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17609.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 644 to state replica 
[ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.14739.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 397 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17624.1> [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.14823.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 905 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17627.1> [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.15094.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 648 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17628.1> [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17613.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 641 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17610.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 643 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17617.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 645 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17607.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 896 to state replica [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.15073.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 902 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17629.1> [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17621.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 384 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17618.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 647 to state replica [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17512.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_907 [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.15636.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 385 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17632.1> [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17616.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 386 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17614.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 649 to state replica [ns_server:info,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.17615.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 393 to state replica [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.14991.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 903 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17634.1> [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.14879.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 395 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17633.1> [ns_server:debug,2014-08-19T16:51:08.550,ns_1@10.242.238.88:<0.15307.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 899 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17635.1> [ns_server:debug,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.14858.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 651 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.17631.1> 
[ns_server:debug,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.14900.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 904 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.17630.1> [ns_server:info,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.17623.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 387 to state replica [ns_server:info,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.17620.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 901 to state replica [ns_server:info,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.17619.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 898 to state replica [ns_server:debug,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.15209.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 391 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17638.1> [ns_server:debug,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.15426.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 388 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17637.1> [ns_server:debug,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.14956.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 394 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17639.1> [ns_server:debug,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.15286.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 390 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17640.1> [ns_server:debug,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.15349.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 389 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.17636.1> [ns_server:info,2014-08-19T16:51:08.551,ns_1@10.242.238.88:<0.17622.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 650 to state replica [rebalance:debug,2014-08-19T16:51:08.552,ns_1@10.242.238.88:<0.17511.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17628.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 648 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17625.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 392 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17624.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 397 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17627.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 905 to state replica [rebalance:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17511.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17629.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 902 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17637.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 388 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17635.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 899 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17631.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 651 to state replica 
[ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17634.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 903 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17630.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 904 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17632.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 385 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17638.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 391 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17639.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 394 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17636.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 389 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17640.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 390 to state replica [ns_server:info,2014-08-19T16:51:08.553,ns_1@10.242.238.88:<0.17633.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 395 to state replica [rebalance:info,2014-08-19T16:51:08.554,ns_1@10.242.238.88:<0.17512.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[907]}, {checkpoints,[{907,1}]}, {name,<<"rebalance_907">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[907]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"907"}]} [ns_server:debug,2014-08-19T16:51:08.552,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373, 190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372, 189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580, 397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396, 
268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267, 212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731, 603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211, 1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730, 602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015, 938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43, 354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481, 170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,352, 897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168, 91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,350] [rebalance:info,2014-08-19T16:51:08.554,ns_1@10.242.238.88:<0.14760.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 906 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.554,ns_1@10.242.238.88:<0.17512.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17641.1> [rebalance:info,2014-08-19T16:51:08.555,ns_1@10.242.238.88:<0.17512.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.556,ns_1@10.242.238.88:<0.14768.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:08.557,ns_1@10.242.238.88:<0.17512.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.557,ns_1@10.242.238.88:<0.17512.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.558,ns_1@10.242.238.88:<0.14683.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 907 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:info,2014-08-19T16:51:08.559,ns_1@10.242.238.88:<0.14768.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_906_'ns_1@10.242.238.89'">>] 
[rebalance:info,2014-08-19T16:51:08.559,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 906 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:debug,2014-08-19T16:51:08.559,ns_1@10.242.238.88:<0.14691.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.559,ns_1@10.242.238.88:<0.17645.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 906 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:08.562,ns_1@10.242.238.88:<0.17583.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_908 [ns_server:info,2014-08-19T16:51:08.563,ns_1@10.242.238.88:<0.14691.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_907_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.565,ns_1@10.242.238.88:<0.17583.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[908]}, {checkpoints,[{908,1}]}, {name,<<"rebalance_908">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[908]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"908"}]} [rebalance:debug,2014-08-19T16:51:08.565,ns_1@10.242.238.88:<0.17583.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17648.1> [rebalance:info,2014-08-19T16:51:08.566,ns_1@10.242.238.88:<0.17583.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.567,ns_1@10.242.238.88:<0.17583.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.567,ns_1@10.242.238.88:<0.17583.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.568,ns_1@10.242.238.88:<0.14620.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 908 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.570,ns_1@10.242.238.88:<0.14628.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.570,ns_1@10.242.238.88:<0.17598.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_910 [rebalance:info,2014-08-19T16:51:08.573,ns_1@10.242.238.88:<0.17598.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[910]}, {checkpoints,[{910,1}]}, {name,<<"rebalance_910">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[910]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"910"}]} [rebalance:debug,2014-08-19T16:51:08.575,ns_1@10.242.238.88:<0.17598.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17649.1> [rebalance:info,2014-08-19T16:51:08.576,ns_1@10.242.238.88:<0.17598.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:08.577,ns_1@10.242.238.88:<0.14628.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_908_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:08.578,ns_1@10.242.238.88:<0.17598.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.578,ns_1@10.242.238.88:<0.17598.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:51:08.579,ns_1@10.242.238.88:<0.14466.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 910 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.580,ns_1@10.242.238.88:<0.14474.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.584,ns_1@10.242.238.88:<0.14474.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_910_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.584,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.584,ns_1@10.242.238.88:<0.17600.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_912 [ns_server:debug,2014-08-19T16:51:08.585,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{906, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.587,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.588,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.590,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.591,ns_1@10.242.238.88:<0.17600.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[912]}, {checkpoints,[{912,1}]}, {name,<<"rebalance_912">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[912]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"912"}]} [rebalance:debug,2014-08-19T16:51:08.592,ns_1@10.242.238.88:<0.17600.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17662.1> [rebalance:info,2014-08-19T16:51:08.593,ns_1@10.242.238.88:<0.17600.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.594,ns_1@10.242.238.88:<0.17600.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.594,ns_1@10.242.238.88:<0.17600.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.595,ns_1@10.242.238.88:<0.14312.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 912 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:08.595,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 906 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.596,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 906) [ns_server:debug,2014-08-19T16:51:08.597,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:51:08.597,ns_1@10.242.238.88:<0.14325.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.597,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 907 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.597,ns_1@10.242.238.88:<0.17665.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 907 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:51:08.599,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/43. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.599,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",43,active,0} [ns_server:info,2014-08-19T16:51:08.600,ns_1@10.242.238.88:<0.14325.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_912_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.602,ns_1@10.242.238.88:<0.17602.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_915 [rebalance:info,2014-08-19T16:51:08.603,ns_1@10.242.238.88:<0.17602.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[915]}, {checkpoints,[{915,1}]}, {name,<<"rebalance_915">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[915]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"915"}]} [rebalance:debug,2014-08-19T16:51:08.604,ns_1@10.242.238.88:<0.17602.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17668.1> [rebalance:info,2014-08-19T16:51:08.604,ns_1@10.242.238.88:<0.17602.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.606,ns_1@10.242.238.88:<0.17602.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.606,ns_1@10.242.238.88:<0.17602.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.607,ns_1@10.242.238.88:<0.14080.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 915 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.607,ns_1@10.242.238.88:<0.17603.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_913 [rebalance:debug,2014-08-19T16:51:08.609,ns_1@10.242.238.88:<0.14088.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.609,ns_1@10.242.238.88:<0.17603.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[913]}, {checkpoints,[{913,1}]}, {name,<<"rebalance_913">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[913]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"913"}]} 
[rebalance:debug,2014-08-19T16:51:08.610,ns_1@10.242.238.88:<0.17603.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17669.1> [rebalance:info,2014-08-19T16:51:08.611,ns_1@10.242.238.88:<0.17603.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.612,ns_1@10.242.238.88:<0.17603.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.613,ns_1@10.242.238.88:<0.17603.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:08.613,ns_1@10.242.238.88:<0.14088.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_915_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.613,ns_1@10.242.238.88:<0.14243.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 913 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.615,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.615,ns_1@10.242.238.88:<0.14251.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.616,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.616,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{907, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.618,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.618,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:08.621,ns_1@10.242.238.88:<0.14251.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_913_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.623,ns_1@10.242.238.88:<0.17599.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_911 [rebalance:info,2014-08-19T16:51:08.624,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 907 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:08.625,ns_1@10.242.238.88:<0.17599.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[911]}, {checkpoints,[{911,1}]}, {name,<<"rebalance_911">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[911]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"911"}]} [ns_server:debug,2014-08-19T16:51:08.625,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 907) [rebalance:debug,2014-08-19T16:51:08.625,ns_1@10.242.238.88:<0.17599.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17683.1> [ns_server:debug,2014-08-19T16:51:08.625,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.625,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 908 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.625,ns_1@10.242.238.88:<0.17685.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 908 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:08.626,ns_1@10.242.238.88:<0.17599.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.627,ns_1@10.242.238.88:<0.17599.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.627,ns_1@10.242.238.88:<0.17599.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.628,ns_1@10.242.238.88:<0.14395.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 911 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.630,ns_1@10.242.238.88:<0.14405.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.631,ns_1@10.242.238.88:<0.17601.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_914 [rebalance:info,2014-08-19T16:51:08.633,ns_1@10.242.238.88:<0.17601.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[914]}, {checkpoints,[{914,1}]}, {name,<<"rebalance_914">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[914]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"914"}]} [rebalance:debug,2014-08-19T16:51:08.634,ns_1@10.242.238.88:<0.17601.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17686.1> [ns_server:info,2014-08-19T16:51:08.637,ns_1@10.242.238.88:<0.14405.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_911_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.638,ns_1@10.242.238.88:<0.17601.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.639,ns_1@10.242.238.88:<0.17601.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.640,ns_1@10.242.238.88:<0.17601.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.641,ns_1@10.242.238.88:<0.14157.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 914 state change: {'ns_1@10.242.238.91',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:51:08.642,ns_1@10.242.238.88:<0.14165.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.644,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.645,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.645,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.646,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.646,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{908, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.646,ns_1@10.242.238.88:<0.14165.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_914_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.650,ns_1@10.242.238.88:<0.17623.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_387 [rebalance:info,2014-08-19T16:51:08.651,ns_1@10.242.238.88:<0.17623.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[387]}, {checkpoints,[{387,1}]}, {name,<<"rebalance_387">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[387]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"387"}]} [rebalance:debug,2014-08-19T16:51:08.651,ns_1@10.242.238.88:<0.17623.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17698.1> [rebalance:info,2014-08-19T16:51:08.652,ns_1@10.242.238.88:<0.17623.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.654,ns_1@10.242.238.88:<0.17623.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.654,ns_1@10.242.238.88:<0.17623.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.655,ns_1@10.242.238.88:<0.15503.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 387 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:08.656,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 908 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.656,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 908) [rebalance:debug,2014-08-19T16:51:08.657,ns_1@10.242.238.88:<0.15511.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.657,ns_1@10.242.238.88:<0.17632.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_385 [ns_server:debug,2014-08-19T16:51:08.657,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.658,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 910 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.658,ns_1@10.242.238.88:<0.17702.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 910 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:08.660,ns_1@10.242.238.88:<0.17632.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[385]}, {checkpoints,[{385,1}]}, {name,<<"rebalance_385">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[385]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"385"}]} [rebalance:debug,2014-08-19T16:51:08.661,ns_1@10.242.238.88:<0.17632.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17704.1> [ns_server:info,2014-08-19T16:51:08.661,ns_1@10.242.238.88:<0.15511.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_387_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.661,ns_1@10.242.238.88:<0.17632.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.663,ns_1@10.242.238.88:<0.17632.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.663,ns_1@10.242.238.88:<0.17632.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.664,ns_1@10.242.238.88:<0.15636.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 385 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.670,ns_1@10.242.238.88:<0.17633.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_395 [rebalance:info,2014-08-19T16:51:08.672,ns_1@10.242.238.88:<0.17633.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[395]}, {checkpoints,[{395,1}]}, {name,<<"rebalance_395">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[395]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"395"}]} [rebalance:debug,2014-08-19T16:51:08.673,ns_1@10.242.238.88:<0.17633.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17706.1> [rebalance:info,2014-08-19T16:51:08.674,ns_1@10.242.238.88:<0.17633.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.676,ns_1@10.242.238.88:<0.17633.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.676,ns_1@10.242.238.88:<0.17633.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:51:08.676,ns_1@10.242.238.88:<0.14879.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 395 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.678,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.678,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:debug,2014-08-19T16:51:08.678,ns_1@10.242.238.88:<0.15644.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.679,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{910, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.679,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.679,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.679,ns_1@10.242.238.88:<0.14887.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.682,ns_1@10.242.238.88:<0.15644.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_385_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:08.682,ns_1@10.242.238.88:<0.14887.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_395_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:08.684,ns_1@10.242.238.88:<0.17630.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_904 [rebalance:info,2014-08-19T16:51:08.685,ns_1@10.242.238.88:<0.17630.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[904]}, {checkpoints,[{904,1}]}, {name,<<"rebalance_904">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[904]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"904"}]} [rebalance:debug,2014-08-19T16:51:08.685,ns_1@10.242.238.88:<0.17630.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17718.1> [rebalance:info,2014-08-19T16:51:08.686,ns_1@10.242.238.88:<0.17630.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:51:08.690,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 910 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.691,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 910) [ns_server:debug,2014-08-19T16:51:08.692,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.692,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 912 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:debug,2014-08-19T16:51:08.692,ns_1@10.242.238.88:<0.17630.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.692,ns_1@10.242.238.88:<0.17721.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 912 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:08.692,ns_1@10.242.238.88:<0.17630.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.693,ns_1@10.242.238.88:<0.14900.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 904 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.695,ns_1@10.242.238.88:<0.14908.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.699,ns_1@10.242.238.88:<0.14908.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_904_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.700,ns_1@10.242.238.88:<0.17635.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_899 [rebalance:info,2014-08-19T16:51:08.702,ns_1@10.242.238.88:<0.17635.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[899]}, {checkpoints,[{899,1}]}, {name,<<"rebalance_899">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[899]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"899"}]} [rebalance:debug,2014-08-19T16:51:08.702,ns_1@10.242.238.88:<0.17635.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17737.1> [rebalance:info,2014-08-19T16:51:08.703,ns_1@10.242.238.88:<0.17635.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.705,ns_1@10.242.238.88:<0.17635.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.705,ns_1@10.242.238.88:<0.17635.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.706,ns_1@10.242.238.88:<0.15307.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 899 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.706,ns_1@10.242.238.88:<0.17627.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_905 [rebalance:debug,2014-08-19T16:51:08.707,ns_1@10.242.238.88:<0.15315.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.707,ns_1@10.242.238.88:<0.17627.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[905]}, {checkpoints,[{905,1}]}, {name,<<"rebalance_905">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[905]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"905"}]} 
[rebalance:debug,2014-08-19T16:51:08.708,ns_1@10.242.238.88:<0.17627.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17740.1> [rebalance:info,2014-08-19T16:51:08.709,ns_1@10.242.238.88:<0.17627.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.711,ns_1@10.242.238.88:<0.17627.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.711,ns_1@10.242.238.88:<0.17627.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:08.711,ns_1@10.242.238.88:<0.15315.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_899_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.711,ns_1@10.242.238.88:<0.14823.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 905 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.712,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.713,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.714,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{912, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.714,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.714,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.715,ns_1@10.242.238.88:<0.14831.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.718,ns_1@10.242.238.88:<0.14831.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_905_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.719,ns_1@10.242.238.88:<0.17640.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_390 [rebalance:info,2014-08-19T16:51:08.721,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 912 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:08.721,ns_1@10.242.238.88:<0.17640.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[390]}, {checkpoints,[{390,1}]}, {name,<<"rebalance_390">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[390]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"390"}]} [ns_server:debug,2014-08-19T16:51:08.721,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 912) [rebalance:debug,2014-08-19T16:51:08.721,ns_1@10.242.238.88:<0.17640.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17753.1> [ns_server:debug,2014-08-19T16:51:08.722,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.722,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 915 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.722,ns_1@10.242.238.88:<0.17640.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:51:08.722,ns_1@10.242.238.88:<0.17755.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 915 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:51:08.723,ns_1@10.242.238.88:<0.17640.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.723,ns_1@10.242.238.88:<0.17640.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.724,ns_1@10.242.238.88:<0.15286.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 390 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.730,ns_1@10.242.238.88:<0.17625.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_392 [rebalance:info,2014-08-19T16:51:08.731,ns_1@10.242.238.88:<0.17625.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[392]}, {checkpoints,[{392,1}]}, {name,<<"rebalance_392">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[392]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"392"}]} [rebalance:debug,2014-08-19T16:51:08.733,ns_1@10.242.238.88:<0.17625.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17757.1> [rebalance:info,2014-08-19T16:51:08.734,ns_1@10.242.238.88:<0.17625.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.735,ns_1@10.242.238.88:<0.17625.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.735,ns_1@10.242.238.88:<0.17625.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.736,ns_1@10.242.238.88:<0.15129.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 392 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.736,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 41. Nacking mccouch update. [views:debug,2014-08-19T16:51:08.737,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/41. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.737,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",41,active,0} [ns_server:debug,2014-08-19T16:51:08.739,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,895,712,584,401,273,218,1023,946,818,763,635,452,324,141,997,869,686,558, 503,375,192,920,792,737,609,426,298,243,115,971,843,660,532,477,349,166,894, 89,711,583,400,272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557, 502,374,191,919,791,736,608,425,297,242,114,970,842,659,531,476,348,165,893, 710,582,399,271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373, 190,918,790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709, 581,398,270,215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372, 189,917,789,734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580, 397,269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916, 788,733,605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396, 268,213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915, 787,732,604,421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267, 212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731, 603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211, 1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730, 602,419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015, 938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43, 354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481, 170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41, 352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479, 168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,350] [ns_server:debug,2014-08-19T16:51:08.741,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.741,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.742,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.742,ns_1@10.242.238.88:<0.17613.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_641 [rebalance:debug,2014-08-19T16:51:08.742,ns_1@10.242.238.88:<0.15294.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.742,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{915, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.742,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.744,ns_1@10.242.238.88:<0.17613.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[641]}, {checkpoints,[{641,1}]}, {name,<<"rebalance_641">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[641]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"641"}]} [rebalance:debug,2014-08-19T16:51:08.744,ns_1@10.242.238.88:<0.15137.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:08.744,ns_1@10.242.238.88:<0.17613.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17760.1> [rebalance:info,2014-08-19T16:51:08.745,ns_1@10.242.238.88:<0.17613.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:08.746,ns_1@10.242.238.88:<0.15294.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_390_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:51:08.748,ns_1@10.242.238.88:<0.17613.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:info,2014-08-19T16:51:08.748,ns_1@10.242.238.88:<0.17613.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:08.749,ns_1@10.242.238.88:<0.15137.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_392_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.750,ns_1@10.242.238.88:<0.15615.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 641 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.751,ns_1@10.242.238.88:<0.15623.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.752,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 915 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.753,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 915) [ns_server:debug,2014-08-19T16:51:08.754,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.754,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 913 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.754,ns_1@10.242.238.88:<0.17772.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 913 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:51:08.755,ns_1@10.242.238.88:<0.15623.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_641_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.757,ns_1@10.242.238.88:<0.17617.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_645 [rebalance:info,2014-08-19T16:51:08.758,ns_1@10.242.238.88:<0.17617.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[645]}, {checkpoints,[{645,1}]}, {name,<<"rebalance_645">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[645]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"645"}]} [rebalance:debug,2014-08-19T16:51:08.759,ns_1@10.242.238.88:<0.17617.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17775.1> [rebalance:info,2014-08-19T16:51:08.760,ns_1@10.242.238.88:<0.17617.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.761,ns_1@10.242.238.88:<0.17617.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.761,ns_1@10.242.238.88:<0.17617.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.762,ns_1@10.242.238.88:<0.15328.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 645 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.764,ns_1@10.242.238.88:<0.15336.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.765,ns_1@10.242.238.88:<0.17618.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_647 [rebalance:info,2014-08-19T16:51:08.766,ns_1@10.242.238.88:<0.17618.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[647]}, {checkpoints,[{647,1}]}, 
{name,<<"rebalance_647">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[647]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"647"}]} [rebalance:debug,2014-08-19T16:51:08.767,ns_1@10.242.238.88:<0.17618.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17777.1> [ns_server:info,2014-08-19T16:51:08.769,ns_1@10.242.238.88:<0.15336.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_645_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.769,ns_1@10.242.238.88:<0.17618.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [views:debug,2014-08-19T16:51:08.771,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/41. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.771,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",41,active,0} [rebalance:debug,2014-08-19T16:51:08.771,ns_1@10.242.238.88:<0.17618.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.772,ns_1@10.242.238.88:<0.17618.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.773,ns_1@10.242.238.88:<0.15188.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 647 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.775,ns_1@10.242.238.88:<0.15196.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.775,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.776,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.776,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.777,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{913, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.777,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:08.781,ns_1@10.242.238.88:<0.15196.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_647_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.783,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 913 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.784,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.784,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 911 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.784,ns_1@10.242.238.88:<0.17791.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 911 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:08.785,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 913) [ns_server:debug,2014-08-19T16:51:08.785,ns_1@10.242.238.88:<0.17636.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_389 [rebalance:info,2014-08-19T16:51:08.787,ns_1@10.242.238.88:<0.17636.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[389]}, {checkpoints,[{389,1}]}, {name,<<"rebalance_389">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[389]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"389"}]} [rebalance:debug,2014-08-19T16:51:08.788,ns_1@10.242.238.88:<0.17636.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17792.1> [rebalance:info,2014-08-19T16:51:08.789,ns_1@10.242.238.88:<0.17636.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.790,ns_1@10.242.238.88:<0.17636.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.790,ns_1@10.242.238.88:<0.17636.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:08.791,ns_1@10.242.238.88:<0.17610.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_643 [rebalance:info,2014-08-19T16:51:08.791,ns_1@10.242.238.88:<0.15349.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 389 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:08.792,ns_1@10.242.238.88:<0.17610.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[643]}, {checkpoints,[{643,1}]}, {name,<<"rebalance_643">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[643]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"643"}]} [rebalance:debug,2014-08-19T16:51:08.793,ns_1@10.242.238.88:<0.17610.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17793.1> [rebalance:info,2014-08-19T16:51:08.794,ns_1@10.242.238.88:<0.17610.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.795,ns_1@10.242.238.88:<0.17610.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.795,ns_1@10.242.238.88:<0.17610.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.796,ns_1@10.242.238.88:<0.15468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 643 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.798,ns_1@10.242.238.88:<0.15480.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:info,2014-08-19T16:51:08.801,ns_1@10.242.238.88:<0.15480.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_643_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.802,ns_1@10.242.238.88:<0.17629.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_902 [rebalance:info,2014-08-19T16:51:08.804,ns_1@10.242.238.88:<0.17629.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[902]}, {checkpoints,[{902,1}]}, {name,<<"rebalance_902">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[902]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"902"}]} [rebalance:debug,2014-08-19T16:51:08.805,ns_1@10.242.238.88:<0.17629.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17797.1> [rebalance:info,2014-08-19T16:51:08.805,ns_1@10.242.238.88:<0.17629.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.807,ns_1@10.242.238.88:<0.17629.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.807,ns_1@10.242.238.88:<0.17629.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.808,ns_1@10.242.238.88:<0.15073.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 902 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.808,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.808,ns_1@10.242.238.88:<0.15357.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.809,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.809,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:08.809,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{911, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.809,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.809,ns_1@10.242.238.88:<0.15081.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.813,ns_1@10.242.238.88:<0.15357.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_389_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:08.814,ns_1@10.242.238.88:<0.15081.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_902_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.815,ns_1@10.242.238.88:<0.17609.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_644 [rebalance:info,2014-08-19T16:51:08.817,ns_1@10.242.238.88:<0.17609.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[644]}, {checkpoints,[{644,1}]}, {name,<<"rebalance_644">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[644]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"644"}]} [rebalance:info,2014-08-19T16:51:08.818,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 911 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:51:08.818,ns_1@10.242.238.88:<0.17609.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17809.1> [ns_server:debug,2014-08-19T16:51:08.818,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 911) [rebalance:info,2014-08-19T16:51:08.818,ns_1@10.242.238.88:<0.17609.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:08.819,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.819,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 914 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.819,ns_1@10.242.238.88:<0.17812.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 914 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:51:08.820,ns_1@10.242.238.88:<0.17609.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.820,ns_1@10.242.238.88:<0.17609.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.822,ns_1@10.242.238.88:<0.15405.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 644 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.824,ns_1@10.242.238.88:<0.15413.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.828,ns_1@10.242.238.88:<0.15413.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_644_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.830,ns_1@10.242.238.88:<0.17637.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_388 [rebalance:info,2014-08-19T16:51:08.831,ns_1@10.242.238.88:<0.17637.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[388]}, {checkpoints,[{388,1}]}, {name,<<"rebalance_388">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[388]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"388"}]} [rebalance:debug,2014-08-19T16:51:08.832,ns_1@10.242.238.88:<0.17637.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17815.1> [rebalance:info,2014-08-19T16:51:08.833,ns_1@10.242.238.88:<0.17637.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.835,ns_1@10.242.238.88:<0.17637.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.835,ns_1@10.242.238.88:<0.17637.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.837,ns_1@10.242.238.88:<0.15426.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 388 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.839,ns_1@10.242.238.88:<0.17604.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_909 [ns_server:debug,2014-08-19T16:51:08.842,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:08.842,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.842,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:51:08.842,ns_1@10.242.238.88:<0.17604.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[909]}, {checkpoints,[{909,1}]}, {name,<<"rebalance_909">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[909]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"909"}]} [ns_server:debug,2014-08-19T16:51:08.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.843,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{914, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:08.843,ns_1@10.242.238.88:<0.15434.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:08.843,ns_1@10.242.238.88:<0.17604.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17819.1> [rebalance:info,2014-08-19T16:51:08.844,ns_1@10.242.238.88:<0.17604.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.845,ns_1@10.242.238.88:<0.17604.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.846,ns_1@10.242.238.88:<0.17604.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:08.846,ns_1@10.242.238.88:<0.15434.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_388_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:08.847,ns_1@10.242.238.88:<0.14543.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 909 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.849,ns_1@10.242.238.88:<0.14551.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.849,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 914 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 914) [ns_server:debug,2014-08-19T16:51:08.850,ns_1@10.242.238.88:<0.17612.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_900 [ns_server:debug,2014-08-19T16:51:08.851,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.851,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 387 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:08.851,ns_1@10.242.238.88:<0.17837.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 387 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:51:08.852,ns_1@10.242.238.88:<0.17612.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[900]}, {checkpoints,[{900,1}]}, {name,<<"rebalance_900">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[900]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"900"}]} [rebalance:debug,2014-08-19T16:51:08.852,ns_1@10.242.238.88:<0.17612.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17838.1> [ns_server:info,2014-08-19T16:51:08.853,ns_1@10.242.238.88:<0.14551.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_909_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.853,ns_1@10.242.238.88:<0.17612.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.855,ns_1@10.242.238.88:<0.17612.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.856,ns_1@10.242.238.88:<0.17612.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.856,ns_1@10.242.238.88:<0.15230.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 900 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.858,ns_1@10.242.238.88:<0.15238.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.861,ns_1@10.242.238.88:<0.15238.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_900_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.864,ns_1@10.242.238.88:<0.17616.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_386 [rebalance:info,2014-08-19T16:51:08.865,ns_1@10.242.238.88:<0.17616.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[386]}, {checkpoints,[{386,1}]}, {name,<<"rebalance_386">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[386]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"386"}]} [rebalance:debug,2014-08-19T16:51:08.866,ns_1@10.242.238.88:<0.17616.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17850.1> [rebalance:info,2014-08-19T16:51:08.866,ns_1@10.242.238.88:<0.17616.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:51:08.868,ns_1@10.242.238.88:<0.17616.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.868,ns_1@10.242.238.88:<0.17616.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.869,ns_1@10.242.238.88:<0.15583.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 386 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.871,ns_1@10.242.238.88:<0.15593.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.873,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.874,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:08.874,ns_1@10.242.238.88:<0.15593.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_386_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:08.874,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.874,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{387, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.874,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.875,ns_1@10.242.238.88:<0.17639.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_394 [rebalance:info,2014-08-19T16:51:08.876,ns_1@10.242.238.88:<0.17639.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[394]}, {checkpoints,[{394,1}]}, {name,<<"rebalance_394">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[394]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"394"}]} [rebalance:debug,2014-08-19T16:51:08.877,ns_1@10.242.238.88:<0.17639.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17860.1> [rebalance:info,2014-08-19T16:51:08.878,ns_1@10.242.238.88:<0.17639.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.880,ns_1@10.242.238.88:<0.17639.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.880,ns_1@10.242.238.88:<0.17639.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.880,ns_1@10.242.238.88:<0.14956.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 394 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.882,ns_1@10.242.238.88:<0.14964.1>:new_ns_replicas_builder:terminate:110]Dying 
with reason: shutdown [rebalance:info,2014-08-19T16:51:08.885,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 387 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.885,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 387) [ns_server:info,2014-08-19T16:51:08.885,ns_1@10.242.238.88:<0.14964.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_394_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:08.886,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.886,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 385 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:08.886,ns_1@10.242.238.88:<0.17866.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 385 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:08.887,ns_1@10.242.238.88:<0.17607.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_896 [rebalance:info,2014-08-19T16:51:08.888,ns_1@10.242.238.88:<0.17607.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[896]}, {checkpoints,[{896,1}]}, {name,<<"rebalance_896">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[896]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"896"}]} [rebalance:debug,2014-08-19T16:51:08.889,ns_1@10.242.238.88:<0.17607.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17867.1> [rebalance:info,2014-08-19T16:51:08.890,ns_1@10.242.238.88:<0.17607.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.897,ns_1@10.242.238.88:<0.17607.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.897,ns_1@10.242.238.88:<0.17607.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.898,ns_1@10.242.238.88:<0.15524.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 896 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.899,ns_1@10.242.238.88:<0.15532.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.900,ns_1@10.242.238.88:<0.17611.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_897 [rebalance:info,2014-08-19T16:51:08.901,ns_1@10.242.238.88:<0.17611.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[897]}, {checkpoints,[{897,1}]}, {name,<<"rebalance_897">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[897]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"897"}]} [rebalance:debug,2014-08-19T16:51:08.902,ns_1@10.242.238.88:<0.17611.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17868.1> [rebalance:info,2014-08-19T16:51:08.903,ns_1@10.242.238.88:<0.17611.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:08.903,ns_1@10.242.238.88:<0.15532.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': 
[<<"replication_building_896_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.904,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.905,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.905,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.905,ns_1@10.242.238.88:<0.17611.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.905,ns_1@10.242.238.88:<0.17611.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:08.905,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{385, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.906,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:08.906,ns_1@10.242.238.88:<0.15447.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 897 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.908,ns_1@10.242.238.88:<0.15455.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.911,ns_1@10.242.238.88:<0.15455.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_897_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.912,ns_1@10.242.238.88:<0.17608.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_640 [rebalance:info,2014-08-19T16:51:08.912,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 385 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.917,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 385) [rebalance:info,2014-08-19T16:51:08.918,ns_1@10.242.238.88:<0.17608.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[640]}, {checkpoints,[{640,1}]}, {name,<<"rebalance_640">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[640]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"640"}]} [ns_server:debug,2014-08-19T16:51:08.918,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:51:08.918,ns_1@10.242.238.88:<0.17608.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17882.1> [rebalance:info,2014-08-19T16:51:08.919,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 395 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:08.919,ns_1@10.242.238.88:<0.17884.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 395 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:51:08.919,ns_1@10.242.238.88:<0.17608.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.921,ns_1@10.242.238.88:<0.17608.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.921,ns_1@10.242.238.88:<0.17608.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.922,ns_1@10.242.238.88:<0.15657.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 640 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.925,ns_1@10.242.238.88:<0.17614.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_649 [rebalance:info,2014-08-19T16:51:08.926,ns_1@10.242.238.88:<0.17614.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[649]}, {checkpoints,[{649,1}]}, {name,<<"rebalance_649">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[649]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"649"}]} [rebalance:debug,2014-08-19T16:51:08.927,ns_1@10.242.238.88:<0.17614.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17885.1> [rebalance:info,2014-08-19T16:51:08.927,ns_1@10.242.238.88:<0.17614.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.929,ns_1@10.242.238.88:<0.17614.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.929,ns_1@10.242.238.88:<0.17614.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:08.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 39. Nacking mccouch update. [views:debug,2014-08-19T16:51:08.929,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/39. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:08.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",39,active,0} [rebalance:info,2014-08-19T16:51:08.930,ns_1@10.242.238.88:<0.15017.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 649 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.933,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,712,401,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,348,165,893,710,582,399,271, 216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735, 607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940,812, 757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292,237, 109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811,756, 628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015,938,810,755,627, 444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235,107,963, 835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754,626,443, 315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234,962,834, 651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753,625,442,314,131, 987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961,833,650,522, 467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441,313,130,986, 858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832,649,521,466, 338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985,857,674, 546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520,465,337,154, 882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362,907,596,285, 230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723,412, 
101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360,905,594,283, 228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776,721, 410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358,903,592,281, 226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774,719, 408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356,901,590,279, 224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772,717, 406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354,899,588,277, 222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770,715, 404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352,897,586,275, 220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768,713, 402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350,895,584,273, 218] [ns_server:debug,2014-08-19T16:51:08.935,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.935,ns_1@10.242.238.88:<0.15665.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.935,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.936,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{395, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:08.936,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:08.936,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.936,ns_1@10.242.238.88:<0.15025.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.938,ns_1@10.242.238.88:<0.17621.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_384 [ns_server:info,2014-08-19T16:51:08.938,ns_1@10.242.238.88:<0.15665.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_640_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:51:08.939,ns_1@10.242.238.88:<0.15025.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_649_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.939,ns_1@10.242.238.88:<0.17621.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[384]}, {checkpoints,[{384,1}]}, {name,<<"rebalance_384">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[384]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"384"}]} [rebalance:debug,2014-08-19T16:51:08.941,ns_1@10.242.238.88:<0.17621.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17897.1> [rebalance:info,2014-08-19T16:51:08.942,ns_1@10.242.238.88:<0.17621.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.943,ns_1@10.242.238.88:<0.17621.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.943,ns_1@10.242.238.88:<0.17621.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.944,ns_1@10.242.238.88:<0.15679.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 384 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.945,ns_1@10.242.238.88:<0.15687.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:08.945,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 395 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:08.946,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 395) [ns_server:debug,2014-08-19T16:51:08.946,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.947,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 904 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.947,ns_1@10.242.238.88:<0.17901.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 904 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:info,2014-08-19T16:51:08.949,ns_1@10.242.238.88:<0.15687.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_384_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:08.950,ns_1@10.242.238.88:<0.17624.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_397 [rebalance:info,2014-08-19T16:51:08.951,ns_1@10.242.238.88:<0.17624.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[397]}, {checkpoints,[{397,1}]}, {name,<<"rebalance_397">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[397]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"397"}]} [rebalance:debug,2014-08-19T16:51:08.952,ns_1@10.242.238.88:<0.17624.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17904.1> [rebalance:info,2014-08-19T16:51:08.953,ns_1@10.242.238.88:<0.17624.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.954,ns_1@10.242.238.88:<0.17624.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.954,ns_1@10.242.238.88:<0.17624.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.955,ns_1@10.242.238.88:<0.14739.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 397 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.961,ns_1@10.242.238.88:<0.17631.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_651 [rebalance:info,2014-08-19T16:51:08.963,ns_1@10.242.238.88:<0.17631.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[651]}, {checkpoints,[{651,1}]}, {name,<<"rebalance_651">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[651]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"651"}]} [rebalance:debug,2014-08-19T16:51:08.963,ns_1@10.242.238.88:<0.17631.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17905.1> [rebalance:info,2014-08-19T16:51:08.964,ns_1@10.242.238.88:<0.17631.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.968,ns_1@10.242.238.88:<0.17631.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.968,ns_1@10.242.238.88:<0.17631.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.969,ns_1@10.242.238.88:<0.14858.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 651 state change: {'ns_1@10.242.238.90',active,undefined, undefined} 
[rebalance:debug,2014-08-19T16:51:08.971,ns_1@10.242.238.88:<0.14866.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.973,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.973,ns_1@10.242.238.88:<0.17606.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_646 [rebalance:debug,2014-08-19T16:51:08.974,ns_1@10.242.238.88:<0.14747.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:08.975,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:08.975,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:08.975,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{904, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:info,2014-08-19T16:51:08.975,ns_1@10.242.238.88:<0.14866.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_651_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:08.975,ns_1@10.242.238.88:<0.17606.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[646]}, {checkpoints,[{646,1}]}, {name,<<"rebalance_646">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[646]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"646"}]} [ns_server:debug,2014-08-19T16:51:08.975,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:08.976,ns_1@10.242.238.88:<0.17606.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17911.1> [rebalance:info,2014-08-19T16:51:08.977,ns_1@10.242.238.88:<0.17606.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:08.977,ns_1@10.242.238.88:<0.14747.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_397_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:51:08.980,ns_1@10.242.238.88:<0.17606.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.980,ns_1@10.242.238.88:<0.17606.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.981,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 904 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:08.982,ns_1@10.242.238.88:<0.15251.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 646 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:08.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 904) [ns_server:debug,2014-08-19T16:51:08.983,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:08.983,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 899 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:08.983,ns_1@10.242.238.88:<0.17921.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 899 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:51:08.983,ns_1@10.242.238.88:<0.15259.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:08.987,ns_1@10.242.238.88:<0.15259.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_646_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:08.989,ns_1@10.242.238.88:<0.17619.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_898 [rebalance:info,2014-08-19T16:51:08.991,ns_1@10.242.238.88:<0.17619.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[898]}, {checkpoints,[{898,1}]}, {name,<<"rebalance_898">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[898]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"898"}]} [rebalance:debug,2014-08-19T16:51:08.991,ns_1@10.242.238.88:<0.17619.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17924.1> [rebalance:info,2014-08-19T16:51:08.992,ns_1@10.242.238.88:<0.17619.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:08.995,ns_1@10.242.238.88:<0.17619.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:08.995,ns_1@10.242.238.88:<0.17619.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:08.996,ns_1@10.242.238.88:<0.15384.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 898 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:08.997,ns_1@10.242.238.88:<0.15392.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:09.000,ns_1@10.242.238.88:<0.17638.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_391 [ns_server:info,2014-08-19T16:51:09.001,ns_1@10.242.238.88:<0.15392.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_898_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:09.001,ns_1@10.242.238.88:<0.17638.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[391]}, {checkpoints,[{391,1}]}, {name,<<"rebalance_391">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[391]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"391"}]} 
[rebalance:debug,2014-08-19T16:51:09.002,ns_1@10.242.238.88:<0.17638.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17927.1> [rebalance:info,2014-08-19T16:51:09.003,ns_1@10.242.238.88:<0.17638.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:09.003,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.004,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.004,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:09.004,ns_1@10.242.238.88:<0.17638.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:51:09.004,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{899, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.004,ns_1@10.242.238.88:<0.17638.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:09.005,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:09.005,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/39. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.005,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",39,active,0} [rebalance:info,2014-08-19T16:51:09.005,ns_1@10.242.238.88:<0.15209.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 391 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:09.007,ns_1@10.242.238.88:<0.15217.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:09.010,ns_1@10.242.238.88:<0.15217.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_391_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:09.011,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 899 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.012,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 899) [ns_server:debug,2014-08-19T16:51:09.012,ns_1@10.242.238.88:<0.17628.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_648 [ns_server:debug,2014-08-19T16:51:09.013,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.013,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 905 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.013,ns_1@10.242.238.88:<0.17940.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 905 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:09.013,ns_1@10.242.238.88:<0.17628.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[648]}, {checkpoints,[{648,1}]}, {name,<<"rebalance_648">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[648]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"648"}]} [rebalance:debug,2014-08-19T16:51:09.014,ns_1@10.242.238.88:<0.17628.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17941.1> [rebalance:info,2014-08-19T16:51:09.015,ns_1@10.242.238.88:<0.17628.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:09.017,ns_1@10.242.238.88:<0.17628.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:09.017,ns_1@10.242.238.88:<0.17628.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:09.018,ns_1@10.242.238.88:<0.15094.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 648 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:09.020,ns_1@10.242.238.88:<0.15102.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:09.022,ns_1@10.242.238.88:<0.15102.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_648_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:09.024,ns_1@10.242.238.88:<0.17605.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_642 [rebalance:info,2014-08-19T16:51:09.026,ns_1@10.242.238.88:<0.17605.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[642]}, {checkpoints,[{642,1}]}, {name,<<"rebalance_642">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[642]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"642"}]} [rebalance:debug,2014-08-19T16:51:09.027,ns_1@10.242.238.88:<0.17605.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17944.1> [rebalance:info,2014-08-19T16:51:09.028,ns_1@10.242.238.88:<0.17605.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:09.031,ns_1@10.242.238.88:<0.17605.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:09.031,ns_1@10.242.238.88:<0.17605.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:51:09.032,ns_1@10.242.238.88:<0.15545.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 642 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:09.035,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:09.035,ns_1@10.242.238.88:<0.15553.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:09.035,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.035,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.036,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.036,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{905, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.036,ns_1@10.242.238.88:<0.17615.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_393 [rebalance:info,2014-08-19T16:51:09.038,ns_1@10.242.238.88:<0.17615.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[393]}, {checkpoints,[{393,1}]}, {name,<<"rebalance_393">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[393]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"393"}]} [ns_server:info,2014-08-19T16:51:09.039,ns_1@10.242.238.88:<0.15553.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_642_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:09.039,ns_1@10.242.238.88:<0.17615.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17954.1> [rebalance:info,2014-08-19T16:51:09.040,ns_1@10.242.238.88:<0.17615.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:09.042,ns_1@10.242.238.88:<0.17615.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:09.042,ns_1@10.242.238.88:<0.17615.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:09.043,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 905 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:09.043,ns_1@10.242.238.88:<0.15052.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 393 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:09.043,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 905) [ns_server:debug,2014-08-19T16:51:09.044,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.045,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 390 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.045,ns_1@10.242.238.88:<0.17958.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 390 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:debug,2014-08-19T16:51:09.045,ns_1@10.242.238.88:<0.15060.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:09.048,ns_1@10.242.238.88:<0.15060.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_393_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:09.049,ns_1@10.242.238.88:<0.17620.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_901 [rebalance:info,2014-08-19T16:51:09.050,ns_1@10.242.238.88:<0.17620.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[901]}, {checkpoints,[{901,1}]}, {name,<<"rebalance_901">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[901]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"901"}]} [rebalance:debug,2014-08-19T16:51:09.051,ns_1@10.242.238.88:<0.17620.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17961.1> [rebalance:info,2014-08-19T16:51:09.051,ns_1@10.242.238.88:<0.17620.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:09.053,ns_1@10.242.238.88:<0.17620.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:09.053,ns_1@10.242.238.88:<0.17620.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:09.054,ns_1@10.242.238.88:<0.15150.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 901 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:09.056,ns_1@10.242.238.88:<0.15158.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:09.059,ns_1@10.242.238.88:<0.15158.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_901_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:09.060,ns_1@10.242.238.88:<0.17622.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_650 [ns_server:debug,2014-08-19T16:51:09.062,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.062,ns_1@10.242.238.88:<0.17622.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[650]}, {checkpoints,[{650,1}]}, {name,<<"rebalance_650">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, 
[{username,"default"}, {password,get_from_config}, {vbuckets,[650]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"650"}]} [ns_server:debug,2014-08-19T16:51:09.062,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:debug,2014-08-19T16:51:09.063,ns_1@10.242.238.88:<0.17622.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17966.1> [ns_server:debug,2014-08-19T16:51:09.063,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.063,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.063,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{390, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.064,ns_1@10.242.238.88:<0.17622.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:09.066,ns_1@10.242.238.88:<0.17622.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:09.066,ns_1@10.242.238.88:<0.17622.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:09.068,ns_1@10.242.238.88:<0.14935.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 650 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:09.070,ns_1@10.242.238.88:<0.14943.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:09.073,ns_1@10.242.238.88:<0.14943.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_650_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:09.074,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 390 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.075,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 390) [ns_server:debug,2014-08-19T16:51:09.075,ns_1@10.242.238.88:<0.17634.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_903 [ns_server:debug,2014-08-19T16:51:09.075,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.075,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 392 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.075,ns_1@10.242.238.88:<0.17984.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 392 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:51:09.075,ns_1@10.242.238.88:<0.17634.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[903]}, {checkpoints,[{903,1}]}, {name,<<"rebalance_903">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[903]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"903"}]} [rebalance:debug,2014-08-19T16:51:09.076,ns_1@10.242.238.88:<0.17634.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.17985.1> [rebalance:info,2014-08-19T16:51:09.077,ns_1@10.242.238.88:<0.17634.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:09.079,ns_1@10.242.238.88:<0.17634.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:09.079,ns_1@10.242.238.88:<0.17634.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:09.080,ns_1@10.242.238.88:<0.14991.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 903 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:09.081,ns_1@10.242.238.88:<0.14999.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:09.085,ns_1@10.242.238.88:<0.14999.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_903_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:09.093,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.094,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.094,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{392, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.094,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:debug,2014-08-19T16:51:09.094,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:51:09.107,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 392 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.107,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 392) [ns_server:debug,2014-08-19T16:51:09.108,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.108,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 641 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.108,ns_1@10.242.238.88:<0.18004.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 641 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.115,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 37. Nacking mccouch update. [views:debug,2014-08-19T16:51:09.115,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/37. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.116,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",37,active,0} [ns_server:debug,2014-08-19T16:51:09.117,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,712,401,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789, 734,606,423,295,240,112,968,840,657,529,474,346,163,891,708,580,397,269,214, 1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605, 422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018, 941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604, 
421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017,940, 812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811, 756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291, 236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015,938,810,755, 627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235,107, 963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754,626, 443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234,962, 834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753,625,442,314, 131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961,833,650, 522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441,313,130, 986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832,649,521, 466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520,465,337, 154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776, 721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774, 719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356,901,590, 279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772, 717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354,899,588, 277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770, 715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352,897,586, 275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768, 713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350,895,584, 273,218] [ns_server:debug,2014-08-19T16:51:09.130,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.131,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.131,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{641, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.131,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.132,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.141,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 641 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.142,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 641) [ns_server:debug,2014-08-19T16:51:09.143,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.143,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 645 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.143,ns_1@10.242.238.88:<0.18015.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 645 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:51:09.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:51:09.161,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.162,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.162,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.162,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{645, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.162,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.168,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 645 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.169,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 645) [ns_server:debug,2014-08-19T16:51:09.169,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.169,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 647 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.170,ns_1@10.242.238.88:<0.18031.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 647 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:51:09.174,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/37. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.174,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",37,active,0} [ns_server:debug,2014-08-19T16:51:09.187,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.187,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.188,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.188,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{647, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.188,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.193,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 647 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.193,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 647) [ns_server:debug,2014-08-19T16:51:09.194,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.194,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 643 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.194,ns_1@10.242.238.88:<0.18042.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 643 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.214,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.215,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.215,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{643, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.215,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.216,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.223,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 643 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.223,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 643) [ns_server:debug,2014-08-19T16:51:09.224,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.224,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 389 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.224,ns_1@10.242.238.88:<0.18053.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 389 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.240,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.241,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.241,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.241,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{389, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.241,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.253,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 389 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.253,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 389) [ns_server:debug,2014-08-19T16:51:09.254,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.254,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 902 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.254,ns_1@10.242.238.88:<0.18064.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 902 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:09.290,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.291,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.291,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.291,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.291,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{902, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.297,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 902 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.298,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 902) [ns_server:debug,2014-08-19T16:51:09.299,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.299,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 644 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.299,ns_1@10.242.238.88:<0.18089.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 644 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.317,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.318,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.318,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{644, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.318,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.318,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.328,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 644 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.331,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.331,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 388 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.331,ns_1@10.242.238.88:<0.18100.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 388 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.334,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 644) [ns_server:debug,2014-08-19T16:51:09.347,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.348,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.348,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 35. Nacking mccouch update. [ns_server:debug,2014-08-19T16:51:09.349,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.349,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{388, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:09.349,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/35. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",35,active,0} [ns_server:debug,2014-08-19T16:51:09.352,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,712,401,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789, 734,606,423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269, 214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,161,889,706,578,395,267,212,1017, 940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939, 
811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419, 291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015,938,810, 755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235, 107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754, 626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234, 962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753,625,442, 314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961,833, 650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441,313, 130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832,649, 521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520,465, 337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99, 776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358,903, 592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97, 774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356,901, 590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95, 772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354,899, 588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93, 770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352,897, 586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91, 768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350,895, 584,273,218] [rebalance:info,2014-08-19T16:51:09.358,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 388 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.359,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 388) [ns_server:debug,2014-08-19T16:51:09.359,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.359,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 909 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.360,ns_1@10.242.238.88:<0.18110.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 909 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:09.382,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [views:debug,2014-08-19T16:51:09.382,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/35. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.383,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",35,active,0} [ns_server:debug,2014-08-19T16:51:09.383,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.383,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{909, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.385,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.386,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.388,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 909 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.389,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 909) [ns_server:debug,2014-08-19T16:51:09.390,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.390,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 900 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.390,ns_1@10.242.238.88:<0.18121.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 900 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:09.418,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.418,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.419,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.419,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{900, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.419,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.426,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 900 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.426,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 900) [ns_server:debug,2014-08-19T16:51:09.427,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.427,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 386 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.427,ns_1@10.242.238.88:<0.18132.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 386 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.444,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.445,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.445,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{386, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.447,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.447,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:51:09.457,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 386 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 386) [ns_server:debug,2014-08-19T16:51:09.458,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.458,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 394 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.458,ns_1@10.242.238.88:<0.18157.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 394 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.475,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.476,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.476,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.476,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.476,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{394, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.485,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 394 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.486,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 394) [ns_server:debug,2014-08-19T16:51:09.486,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.486,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 896 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.486,ns_1@10.242.238.88:<0.18168.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 896 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:09.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 33. Nacking mccouch update. [views:debug,2014-08-19T16:51:09.499,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/33. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",33,active,0} [ns_server:debug,2014-08-19T16:51:09.501,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,712,401,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789, 734,606,423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269, 214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212, 1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603, 420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016, 939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602, 419,291,236,108,964,836,653,525,470,342,159,887,704,576,393,265,210,1015,938, 810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290, 235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809, 754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289, 234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753,625, 442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961, 833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358, 903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174, 97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356, 901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172, 95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354, 899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170, 93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352, 897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168, 91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350, 895,584,273,218] [ns_server:debug,2014-08-19T16:51:09.511,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.512,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.512,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{896, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.512,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.513,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.524,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 896 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.524,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 896) [ns_server:debug,2014-08-19T16:51:09.525,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.525,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 897 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.525,ns_1@10.242.238.88:<0.18180.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 897 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:51:09.532,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/33. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.533,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",33,active,0} [ns_server:debug,2014-08-19T16:51:09.548,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.549,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.549,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.550,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.550,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{897, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.556,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 897 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.556,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 897) [ns_server:debug,2014-08-19T16:51:09.557,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.557,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 640 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.557,ns_1@10.242.238.88:<0.18191.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 640 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.576,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.577,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.577,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.577,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{640, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.578,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.583,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 640 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.584,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 640) [ns_server:debug,2014-08-19T16:51:09.585,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.585,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 649 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.585,ns_1@10.242.238.88:<0.18202.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 649 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.603,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.604,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.605,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.605,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{649, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.605,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.614,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 649 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.615,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 649) [ns_server:debug,2014-08-19T16:51:09.616,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.616,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 384 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.616,ns_1@10.242.238.88:<0.18226.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 384 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.633,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.633,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.634,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.634,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{384, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.634,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.645,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 384 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.646,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 384) [ns_server:debug,2014-08-19T16:51:09.647,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.647,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 651 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.647,ns_1@10.242.238.88:<0.18237.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 651 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.669,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.670,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.670,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.670,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.671,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{651, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.679,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 651 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.680,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 651) [ns_server:debug,2014-08-19T16:51:09.680,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.680,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 397 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.681,ns_1@10.242.238.88:<0.18248.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 397 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 31. Nacking mccouch update. [views:debug,2014-08-19T16:51:09.691,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/31. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",31,active,0} [ns_server:debug,2014-08-19T16:51:09.694,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,712,401,1023,946,818,763,635,452,324,141,997,869,686,558,503,375,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789, 734,606,423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269, 214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212, 1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603, 420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016, 939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602, 419,291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015, 938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43, 354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481, 170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41, 352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479, 168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39, 350,895,584,273,218] [ns_server:debug,2014-08-19T16:51:09.698,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.699,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.699,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{397, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.699,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.699,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.708,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 397 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.708,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 397) [ns_server:debug,2014-08-19T16:51:09.709,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.709,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 646 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.709,ns_1@10.242.238.88:<0.18259.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 646 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.730,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.731,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.731,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.731,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{646, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.731,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.746,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 646 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.747,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 646) [ns_server:debug,2014-08-19T16:51:09.747,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.747,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 898 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.747,ns_1@10.242.238.88:<0.18270.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 898 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [views:debug,2014-08-19T16:51:09.750,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/31. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.750,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",31,active,0} [ns_server:debug,2014-08-19T16:51:09.768,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.769,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.769,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.769,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{898, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.769,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.775,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 898 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.776,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 898) [ns_server:debug,2014-08-19T16:51:09.776,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.776,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 391 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.777,ns_1@10.242.238.88:<0.18281.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 391 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.795,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.796,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.796,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.796,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.797,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{391, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.802,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 391 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.802,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 391) [ns_server:debug,2014-08-19T16:51:09.803,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.803,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 648 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.803,ns_1@10.242.238.88:<0.18306.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 648 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.830,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.830,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.831,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.831,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{648, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:09.831,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:09.840,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 648 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.841,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 648) [ns_server:debug,2014-08-19T16:51:09.841,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.842,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 642 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [ns_server:debug,2014-08-19T16:51:09.842,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 29. Nacking mccouch update. [rebalance:info,2014-08-19T16:51:09.842,ns_1@10.242.238.88:<0.18317.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 642 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:51:09.842,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/29. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.842,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",29,active,0} [ns_server:debug,2014-08-19T16:51:09.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,712,401,1023,946,635,324,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608,425, 297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021,944, 816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423,295, 240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942,814, 759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239, 111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941,813,758, 630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421,293,238, 110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940,812,757, 629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,109, 965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938,810,755,627, 444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235,107,963, 835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754,626,443, 315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234,962,834, 651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753,625,442,314, 131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961,833,650, 522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441,313,130, 986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832,649,521, 466,338,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985,857, 674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520,465,337, 154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362,907,596, 285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778,723, 412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360,905,594, 283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99,776, 
721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358,903,592, 281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97,774, 719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356,901,590, 279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95,772, 717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354,899,588, 277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93,770, 715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352,897,586, 275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91,768, 713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350,895,584, 273,218,818,763,452,141] [ns_server:debug,2014-08-19T16:51:09.861,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.861,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.862,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.862,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.862,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{642, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.870,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 642 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.870,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 642) [ns_server:debug,2014-08-19T16:51:09.871,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.871,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 393 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.871,ns_1@10.242.238.88:<0.18328.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 393 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:09.889,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.891,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.891,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:09.891,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.892,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{393, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:09.893,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/29. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.893,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",29,active,0} [rebalance:info,2014-08-19T16:51:09.900,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 393 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.901,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 393) [ns_server:debug,2014-08-19T16:51:09.902,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:09.902,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 901 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.902,ns_1@10.242.238.88:<0.18340.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 901 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:09.923,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.923,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.923,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.924,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.924,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{901, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.931,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 901 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.932,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 901) [ns_server:debug,2014-08-19T16:51:09.933,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:09.933,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 650 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.933,ns_1@10.242.238.88:<0.18357.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 650 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:09.957,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.957,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.958,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.958,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.958,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{650, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:09.965,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 650 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:09.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 650) [ns_server:debug,2014-08-19T16:51:09.966,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:09.966,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 903 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:09.967,ns_1@10.242.238.88:<0.18377.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 903 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:09.968,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 27. Nacking mccouch update. [views:debug,2014-08-19T16:51:09.968,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/27. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:09.968,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",27,active,0} [ns_server:debug,2014-08-19T16:51:09.970,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,881,570,259,204,804,749, 438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931,620, 309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747,436, 125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618,307, 252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434,123, 979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305,250, 850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121,977, 666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248,848, 537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975,664, 353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846,535, 480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662,351, 896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533,478, 167,712,401,1023,946,635,324,997,869,686,558,503,375,192,920,792,737,609,426, 298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608,425, 297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021,944, 816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424,296, 241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943,815, 760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423,295, 240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942,814, 759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294,239, 111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941,813,758, 630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421,293,238, 110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940,812,757, 629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292,237,109, 965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811,756,628, 445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291,236,108, 964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938,810,755,627, 444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235,107,963, 835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754,626,443, 315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234,962,834, 651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753,625,442,314, 131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961,833,650, 522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441,313,130, 986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832,649,521, 466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129,985, 857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520,465, 337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362,907, 596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178,778, 723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360,905, 594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176,99, 
776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358,903, 592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174,97, 774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356,901, 590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172,95, 772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354,899, 588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170,93, 770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352,897, 586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168,91, 768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350,895, 584,273,218,818,763,452,141] [ns_server:debug,2014-08-19T16:51:09.992,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.993,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:09.993,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.993,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:09.993,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{903, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:10.000,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 903 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:10.001,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 903) [views:debug,2014-08-19T16:51:10.002,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/27. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.002,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",27,active,0} [ns_server:debug,2014-08-19T16:51:10.002,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{compact,'ns_1@10.242.238.91'},{compact,'ns_1@10.242.238.88'}] [ns_server:debug,2014-08-19T16:51:10.002,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:10.003,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:447]Starting compaction for the following buckets: [<<"default">>] [ns_server:debug,2014-08-19T16:51:10.004,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1453]Going to spawn bucket compaction with forced view compaction for bucket default [ns_server:debug,2014-08-19T16:51:10.004,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:compact_next_bucket:1482]Spawned 'uninhibited' compaction for default [ns_server:debug,2014-08-19T16:51:10.004,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:51:10.004,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:10.005,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:info,2014-08-19T16:51:10.006,ns_1@10.242.238.88:<0.18389.1>:compaction_daemon:check_all_dbs_exist:1611]Skipping compaction of bucket `default` since at least database `default/0` seems to be missing. [ns_server:debug,2014-08-19T16:51:10.006,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:handle_info:505]Finished compaction iteration. [ns_server:debug,2014-08-19T16:51:10.006,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.88'} [ns_server:debug,2014-08-19T16:51:10.007,ns_1@10.242.238.88:compaction_daemon<0.18062.0>:compaction_daemon:schedule_next_compaction:1519]Finished compaction too soon. 
Next run will be in 30s [ns_server:debug,2014-08-19T16:51:10.009,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.009,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18390.1>) [ns_server:debug,2014-08-19T16:51:10.009,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 639) [ns_server:debug,2014-08-19T16:51:10.009,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:handle_info:203]noted compaction done: {compact,'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:10.010,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:10.010,ns_1@10.242.238.88:<0.18390.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 639 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.010,ns_1@10.242.238.88:<0.18396.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 639 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.010,ns_1@10.242.238.88:<0.18397.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 639 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.013,ns_1@10.242.238.88:<0.18398.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 639 into 'ns_1@10.242.238.89' is <18124.31680.0> [ns_server:debug,2014-08-19T16:51:10.015,ns_1@10.242.238.88:<0.18398.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 639 into 'ns_1@10.242.238.90' is <18125.26778.0> [rebalance:debug,2014-08-19T16:51:10.015,ns_1@10.242.238.88:<0.18390.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 639 is <0.18398.1> [ns_server:debug,2014-08-19T16:51:10.041,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,32521}, tap_estimate, {replica_building,"default",639,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31680.0>, <<"replication_building_639_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.052,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,43551}, tap_estimate, {replica_building,"default",639,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26778.0>, <<"replication_building_639_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.053,ns_1@10.242.238.88:<0.18399.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26778.0>}, {'ns_1@10.242.238.89',<18124.31680.0>}]) [rebalance:info,2014-08-19T16:51:10.053,ns_1@10.242.238.88:<0.18390.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:10.053,ns_1@10.242.238.88:<0.18390.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 639 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:51:10.054,ns_1@10.242.238.88:<0.18390.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.058,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.060,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.060,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18426.1>) [ns_server:debug,2014-08-19T16:51:10.061,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 895) [ns_server:debug,2014-08-19T16:51:10.061,ns_1@10.242.238.88:<0.18427.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [rebalance:info,2014-08-19T16:51:10.061,ns_1@10.242.238.88:<0.18426.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 895 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.061,ns_1@10.242.238.88:<0.18432.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 895 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.061,ns_1@10.242.238.88:<0.18433.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 895 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.065,ns_1@10.242.238.88:<0.18434.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 895 into 'ns_1@10.242.238.89' is <18124.31685.0> [ns_server:debug,2014-08-19T16:51:10.067,ns_1@10.242.238.88:<0.18434.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 895 into 'ns_1@10.242.238.91' is <18126.27970.0> [rebalance:debug,2014-08-19T16:51:10.067,ns_1@10.242.238.88:<0.18426.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 895 is <0.18434.1> [ns_server:debug,2014-08-19T16:51:10.099,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,90461}, tap_estimate, {replica_building,"default",895,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31685.0>, <<"replication_building_895_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.102,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 25. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.102,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/25. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.102,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",25,active,0} [ns_server:debug,2014-08-19T16:51:10.105,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,879,568,257,202,802,747, 436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929,618, 307,252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745,434, 123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616,305, 250,850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432,121, 977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303,248, 848,537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119,975, 664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246,846, 535,480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973,662, 351,896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844,533, 478,167,712,401,1023,946,635,324,997,869,686,558,503,375,192,920,792,737,609, 426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022, 945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608, 425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941,813, 758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940,812, 757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292,237, 109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811,756, 628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938,810,755, 627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235,107, 963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754,626, 443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234,962, 834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753,625,442, 314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961,833, 650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441,313, 130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832,649, 521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358, 903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174, 97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356, 901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172, 95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354, 899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170, 93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352, 897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168, 91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350, 895,584,273,218,818,763,452,141] [ns_server:debug,2014-08-19T16:51:10.107,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,98048}, tap_estimate, {replica_building,"default",895,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27970.0>, <<"replication_building_895_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:10.107,ns_1@10.242.238.88:<0.18435.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27970.0>}, {'ns_1@10.242.238.89',<18124.31685.0>}]) [rebalance:info,2014-08-19T16:51:10.107,ns_1@10.242.238.88:<0.18426.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:10.108,ns_1@10.242.238.88:<0.18426.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 895 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.108,ns_1@10.242.238.88:<0.18426.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.109,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.112,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:10.112,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18447.1>) [ns_server:debug,2014-08-19T16:51:10.112,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 383) [ns_server:debug,2014-08-19T16:51:10.113,ns_1@10.242.238.88:<0.18448.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [rebalance:info,2014-08-19T16:51:10.113,ns_1@10.242.238.88:<0.18447.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 383 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.113,ns_1@10.242.238.88:<0.18453.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 383 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.113,ns_1@10.242.238.88:<0.18454.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 383 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.118,ns_1@10.242.238.88:<0.18455.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 383 into 'ns_1@10.242.238.90' is <18125.26798.0> [ns_server:debug,2014-08-19T16:51:10.120,ns_1@10.242.238.88:<0.18455.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 383 into 'ns_1@10.242.238.89' is <18124.31704.0> [rebalance:debug,2014-08-19T16:51:10.120,ns_1@10.242.238.88:<0.18447.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 383 is <0.18455.1> [ns_server:debug,2014-08-19T16:51:10.151,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,136366}, tap_estimate, {replica_building,"default",383,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26798.0>, <<"replication_building_383_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.157,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,148484}, tap_estimate, {replica_building,"default",383,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31704.0>, <<"replication_building_383_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.158,ns_1@10.242.238.88:<0.18456.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31704.0>}, {'ns_1@10.242.238.90',<18125.26798.0>}]) [rebalance:info,2014-08-19T16:51:10.158,ns_1@10.242.238.88:<0.18447.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:10.159,ns_1@10.242.238.88:<0.18447.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 383 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.159,ns_1@10.242.238.88:<0.18447.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.160,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:10.162,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.163,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18468.1>) 
[ns_server:debug,2014-08-19T16:51:10.163,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 894) [ns_server:debug,2014-08-19T16:51:10.163,ns_1@10.242.238.88:<0.18469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.163,ns_1@10.242.238.88:<0.18469.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:10.163,ns_1@10.242.238.88:<0.18468.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 894 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.163,ns_1@10.242.238.88:<0.18474.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 894 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.163,ns_1@10.242.238.88:<0.18475.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 894 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [views:debug,2014-08-19T16:51:10.167,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/25. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.167,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",25,active,0} [ns_server:debug,2014-08-19T16:51:10.169,ns_1@10.242.238.88:<0.18476.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 894 into 'ns_1@10.242.238.89' is <18124.31710.0> [ns_server:debug,2014-08-19T16:51:10.171,ns_1@10.242.238.88:<0.18476.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 894 into 'ns_1@10.242.238.91' is <18126.27990.0> [rebalance:debug,2014-08-19T16:51:10.171,ns_1@10.242.238.88:<0.18468.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 894 is <0.18476.1> [ns_server:debug,2014-08-19T16:51:10.196,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,186976}, tap_estimate, {replica_building,"default",894,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31710.0>, <<"replication_building_894_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.206,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,197895}, tap_estimate, {replica_building,"default",894,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.27990.0>, <<"replication_building_894_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:10.207,ns_1@10.242.238.88:<0.18477.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.27990.0>}, {'ns_1@10.242.238.89',<18124.31710.0>}]) [rebalance:info,2014-08-19T16:51:10.207,ns_1@10.242.238.88:<0.18468.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:10.208,ns_1@10.242.238.88:<0.18468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 894 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:51:10.208,ns_1@10.242.238.88:<0.18468.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.209,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.212,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.212,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18489.1>) [ns_server:debug,2014-08-19T16:51:10.212,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 638) [ns_server:debug,2014-08-19T16:51:10.212,ns_1@10.242.238.88:<0.18490.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.212,ns_1@10.242.238.88:<0.18490.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:10.212,ns_1@10.242.238.88:<0.18489.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 638 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.213,ns_1@10.242.238.88:<0.18495.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 638 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.213,ns_1@10.242.238.88:<0.18496.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 638 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.217,ns_1@10.242.238.88:<0.18497.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 638 into 'ns_1@10.242.238.89' is <18124.31715.0> [ns_server:debug,2014-08-19T16:51:10.219,ns_1@10.242.238.88:<0.18497.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 638 into 'ns_1@10.242.238.90' is <18125.26817.0> [rebalance:debug,2014-08-19T16:51:10.219,ns_1@10.242.238.88:<0.18489.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 638 is <0.18497.1> [ns_server:debug,2014-08-19T16:51:10.242,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,233615}, tap_estimate, {replica_building,"default",638,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31715.0>, <<"replication_building_638_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.254,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,245545}, tap_estimate, {replica_building,"default",638,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26817.0>, <<"replication_building_638_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.255,ns_1@10.242.238.88:<0.18498.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26817.0>}, {'ns_1@10.242.238.89',<18124.31715.0>}]) 
[rebalance:info,2014-08-19T16:51:10.255,ns_1@10.242.238.88:<0.18489.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:10.255,ns_1@10.242.238.88:<0.18489.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 638 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.256,ns_1@10.242.238.88:<0.18489.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.257,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.261,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:10.261,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18524.1>) [ns_server:debug,2014-08-19T16:51:10.261,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 382) [ns_server:debug,2014-08-19T16:51:10.262,ns_1@10.242.238.88:<0.18525.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.262,ns_1@10.242.238.88:<0.18525.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:10.262,ns_1@10.242.238.88:<0.18524.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 382 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.262,ns_1@10.242.238.88:<0.18530.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 382 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.262,ns_1@10.242.238.88:<0.18531.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 382 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.266,ns_1@10.242.238.88:<0.18532.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 382 into 'ns_1@10.242.238.90' is <18125.26823.0> [ns_server:debug,2014-08-19T16:51:10.269,ns_1@10.242.238.88:<0.18532.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 382 into 'ns_1@10.242.238.89' is <18124.31734.0> [rebalance:debug,2014-08-19T16:51:10.269,ns_1@10.242.238.88:<0.18524.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 382 is <0.18532.1> [ns_server:debug,2014-08-19T16:51:10.292,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,283919}, tap_estimate, {replica_building,"default",382,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26823.0>, <<"replication_building_382_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.303,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,294625}, tap_estimate, {replica_building,"default",382,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31734.0>, <<"replication_building_382_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.304,ns_1@10.242.238.88:<0.18533.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31734.0>}, {'ns_1@10.242.238.90',<18125.26823.0>}]) [rebalance:info,2014-08-19T16:51:10.304,ns_1@10.242.238.88:<0.18524.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:10.304,ns_1@10.242.238.88:<0.18524.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 382 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.305,ns_1@10.242.238.88:<0.18524.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.305,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:10.308,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.308,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18545.1>) 
[ns_server:debug,2014-08-19T16:51:10.309,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 893) [ns_server:debug,2014-08-19T16:51:10.309,ns_1@10.242.238.88:<0.18546.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.309,ns_1@10.242.238.88:<0.18546.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:10.309,ns_1@10.242.238.88:<0.18545.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 893 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.309,ns_1@10.242.238.88:<0.18551.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 893 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.310,ns_1@10.242.238.88:<0.18552.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 893 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.313,ns_1@10.242.238.88:<0.18553.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 893 into 'ns_1@10.242.238.89' is <18124.31740.0> [ns_server:debug,2014-08-19T16:51:10.316,ns_1@10.242.238.88:<0.18553.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 893 into 'ns_1@10.242.238.91' is <18126.28010.0> [rebalance:debug,2014-08-19T16:51:10.316,ns_1@10.242.238.88:<0.18545.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 893 is <0.18553.1> [ns_server:debug,2014-08-19T16:51:10.341,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 23. Nacking mccouch update. [ns_server:debug,2014-08-19T16:51:10.341,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,332958}, tap_estimate, {replica_building,"default",893,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31740.0>, <<"replication_building_893_'ns_1@10.242.238.89'">>} [views:debug,2014-08-19T16:51:10.342,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/23. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.342,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",23,active,0} [ns_server:debug,2014-08-19T16:51:10.344,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,877,566,511,200,800,745, 434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927,616, 305,250,850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743,432, 121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614,303, 248,848,537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430,119, 975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301,246, 846,535,480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117,973, 662,351,896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244,844, 533,478,167,712,401,1023,946,635,324,997,869,686,558,503,375,192,920,792,737, 609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217, 1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216, 1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607, 424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020, 943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940, 812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811, 756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291, 236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938,810, 755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235, 107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754, 626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234, 962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753,625, 442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961, 833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43, 354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481, 170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41, 352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479, 168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39, 350,895,584,273,218,818,763,452,141] [ns_server:debug,2014-08-19T16:51:10.351,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,342601}, tap_estimate, {replica_building,"default",893,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28010.0>, <<"replication_building_893_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:10.352,ns_1@10.242.238.88:<0.18554.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28010.0>}, {'ns_1@10.242.238.89',<18124.31740.0>}]) [rebalance:info,2014-08-19T16:51:10.352,ns_1@10.242.238.88:<0.18545.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:10.353,ns_1@10.242.238.88:<0.18545.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 893 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.353,ns_1@10.242.238.88:<0.18545.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.354,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.357,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.357,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18566.1>) [ns_server:debug,2014-08-19T16:51:10.357,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 637) [ns_server:debug,2014-08-19T16:51:10.358,ns_1@10.242.238.88:<0.18567.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.358,ns_1@10.242.238.88:<0.18567.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:10.358,ns_1@10.242.238.88:<0.18566.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 637 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.358,ns_1@10.242.238.88:<0.18572.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 637 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.358,ns_1@10.242.238.88:<0.18573.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 637 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.362,ns_1@10.242.238.88:<0.18574.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 637 into 'ns_1@10.242.238.89' is <18124.31745.0> [ns_server:debug,2014-08-19T16:51:10.365,ns_1@10.242.238.88:<0.18574.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 637 into 'ns_1@10.242.238.90' is <18125.26831.0> [rebalance:debug,2014-08-19T16:51:10.365,ns_1@10.242.238.88:<0.18566.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 637 is <0.18574.1> [ns_server:debug,2014-08-19T16:51:10.388,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,379575}, tap_estimate, {replica_building,"default",637,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31745.0>, <<"replication_building_637_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.399,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,390297}, tap_estimate, {replica_building,"default",637,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26831.0>, <<"replication_building_637_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.399,ns_1@10.242.238.88:<0.18575.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26831.0>}, {'ns_1@10.242.238.89',<18124.31745.0>}]) [rebalance:info,2014-08-19T16:51:10.399,ns_1@10.242.238.88:<0.18566.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:10.400,ns_1@10.242.238.88:<0.18566.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 637 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.401,ns_1@10.242.238.88:<0.18566.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:10.401,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/23. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.401,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",23,active,0} [ns_server:debug,2014-08-19T16:51:10.402,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.405,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:10.405,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18587.1>) [ns_server:debug,2014-08-19T16:51:10.405,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 381) [ns_server:debug,2014-08-19T16:51:10.405,ns_1@10.242.238.88:<0.18588.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.406,ns_1@10.242.238.88:<0.18588.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:10.406,ns_1@10.242.238.88:<0.18587.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 381 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.406,ns_1@10.242.238.88:<0.18593.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 381 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.406,ns_1@10.242.238.88:<0.18594.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 381 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.410,ns_1@10.242.238.88:<0.18595.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 381 into 'ns_1@10.242.238.90' is <18125.26848.0> [ns_server:debug,2014-08-19T16:51:10.413,ns_1@10.242.238.88:<0.18595.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 381 into 'ns_1@10.242.238.89' is <18124.31764.0> [rebalance:debug,2014-08-19T16:51:10.413,ns_1@10.242.238.88:<0.18587.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 381 is <0.18595.1> [ns_server:debug,2014-08-19T16:51:10.436,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,427581}, tap_estimate, {replica_building,"default",381,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26848.0>, <<"replication_building_381_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.449,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,440890}, tap_estimate, {replica_building,"default",381,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31764.0>, <<"replication_building_381_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.450,ns_1@10.242.238.88:<0.18596.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31764.0>}, 
{'ns_1@10.242.238.90',<18125.26848.0>}]) [rebalance:info,2014-08-19T16:51:10.450,ns_1@10.242.238.88:<0.18587.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:10.450,ns_1@10.242.238.88:<0.18587.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 381 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.451,ns_1@10.242.238.88:<0.18587.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.452,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:10.454,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.454,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18608.1>) [ns_server:debug,2014-08-19T16:51:10.455,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 892) [ns_server:debug,2014-08-19T16:51:10.455,ns_1@10.242.238.88:<0.18609.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.455,ns_1@10.242.238.88:<0.18609.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:10.455,ns_1@10.242.238.88:<0.18608.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 892 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.455,ns_1@10.242.238.88:<0.18614.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 892 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.455,ns_1@10.242.238.88:<0.18615.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 892 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.459,ns_1@10.242.238.88:<0.18616.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 892 into 'ns_1@10.242.238.89' is <18124.31770.0> [ns_server:debug,2014-08-19T16:51:10.462,ns_1@10.242.238.88:<0.18616.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 892 into 'ns_1@10.242.238.91' is <18126.28030.0> [rebalance:debug,2014-08-19T16:51:10.462,ns_1@10.242.238.88:<0.18608.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 892 is <0.18616.1> [ns_server:debug,2014-08-19T16:51:10.485,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,476296}, tap_estimate, {replica_building,"default",892,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31770.0>, <<"replication_building_892_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.498,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,489489}, tap_estimate, {replica_building,"default",892,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28030.0>, <<"replication_building_892_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:10.498,ns_1@10.242.238.88:<0.18617.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28030.0>}, {'ns_1@10.242.238.89',<18124.31770.0>}]) [rebalance:info,2014-08-19T16:51:10.499,ns_1@10.242.238.88:<0.18608.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:10.499,ns_1@10.242.238.88:<0.18608.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 892 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.500,ns_1@10.242.238.88:<0.18608.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.500,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.503,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.503,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18643.1>) 
[ns_server:debug,2014-08-19T16:51:10.504,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 636) [ns_server:debug,2014-08-19T16:51:10.504,ns_1@10.242.238.88:<0.18644.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.504,ns_1@10.242.238.88:<0.18644.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:10.504,ns_1@10.242.238.88:<0.18643.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 636 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.504,ns_1@10.242.238.88:<0.18649.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 636 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.505,ns_1@10.242.238.88:<0.18650.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 636 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.508,ns_1@10.242.238.88:<0.18651.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 636 into 'ns_1@10.242.238.89' is <18124.31789.0> [ns_server:debug,2014-08-19T16:51:10.511,ns_1@10.242.238.88:<0.18651.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 636 into 'ns_1@10.242.238.90' is <18125.26853.0> [rebalance:debug,2014-08-19T16:51:10.511,ns_1@10.242.238.88:<0.18643.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 636 is <0.18651.1> [ns_server:debug,2014-08-19T16:51:10.534,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,525482}, tap_estimate, {replica_building,"default",636,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31789.0>, <<"replication_building_636_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.545,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,536541}, tap_estimate, {replica_building,"default",636,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26853.0>, <<"replication_building_636_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.546,ns_1@10.242.238.88:<0.18652.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26853.0>}, {'ns_1@10.242.238.89',<18124.31789.0>}]) [rebalance:info,2014-08-19T16:51:10.546,ns_1@10.242.238.88:<0.18643.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:10.546,ns_1@10.242.238.88:<0.18643.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 636 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.547,ns_1@10.242.238.88:<0.18643.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.547,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:51:10.550,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:10.550,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18664.1>) [ns_server:debug,2014-08-19T16:51:10.551,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 380) [ns_server:debug,2014-08-19T16:51:10.551,ns_1@10.242.238.88:<0.18665.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.551,ns_1@10.242.238.88:<0.18665.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:10.551,ns_1@10.242.238.88:<0.18664.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 380 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.551,ns_1@10.242.238.88:<0.18670.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 380 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.551,ns_1@10.242.238.88:<0.18671.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 380 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.556,ns_1@10.242.238.88:<0.18672.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 380 into 'ns_1@10.242.238.90' is <18125.26873.0> [ns_server:debug,2014-08-19T16:51:10.558,ns_1@10.242.238.88:<0.18672.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 380 into 'ns_1@10.242.238.89' is <18124.31794.0> [rebalance:debug,2014-08-19T16:51:10.558,ns_1@10.242.238.88:<0.18664.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 380 is <0.18672.1> [ns_server:debug,2014-08-19T16:51:10.582,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,573436}, tap_estimate, {replica_building,"default",380,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26873.0>, <<"replication_building_380_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.592,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 21. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.592,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/21. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.593,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",21,active,0} [ns_server:debug,2014-08-19T16:51:10.595,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,586467}, tap_estimate, {replica_building,"default",380,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31794.0>, <<"replication_building_380_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.595,ns_1@10.242.238.88:<0.18673.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31794.0>}, {'ns_1@10.242.238.90',<18125.26873.0>}]) [rebalance:info,2014-08-19T16:51:10.595,ns_1@10.242.238.88:<0.18664.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [ns_server:debug,2014-08-19T16:51:10.595,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,875,564,509,198,798,743, 432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925,614, 303,248,848,537,482,171,771,716,405,950,639,328,873,562,507,196,796,741,430, 119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612,301, 246,846,535,480,169,769,714,403,948,637,326,871,560,505,194,794,739,428,117, 973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921,610,299,244, 844,533,478,167,712,401,1023,946,635,324,997,869,686,558,503,375,192,920,792, 737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791, 736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271, 216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735, 607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214, 1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605, 422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018, 941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017, 940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939, 811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419, 291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938, 810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290, 235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809, 754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289, 234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753, 
625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831, 648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673, 51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671, 49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542, 487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669, 47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540, 485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667, 45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538, 483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665, 43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536, 481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663, 41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534, 479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661, 39,350,895,584,273,218,818,763,452,141] [rebalance:info,2014-08-19T16:51:10.596,ns_1@10.242.238.88:<0.18664.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 380 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.597,ns_1@10.242.238.88:<0.18664.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.597,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:10.600,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.600,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18685.1>) [ns_server:debug,2014-08-19T16:51:10.600,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 891) [ns_server:debug,2014-08-19T16:51:10.600,ns_1@10.242.238.88:<0.18686.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.601,ns_1@10.242.238.88:<0.18686.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:10.601,ns_1@10.242.238.88:<0.18685.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 891 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.601,ns_1@10.242.238.88:<0.18691.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 891 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.601,ns_1@10.242.238.88:<0.18692.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 891 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.605,ns_1@10.242.238.88:<0.18693.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 891 into 'ns_1@10.242.238.89' is <18124.31800.0> [ns_server:debug,2014-08-19T16:51:10.608,ns_1@10.242.238.88:<0.18693.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 891 into 'ns_1@10.242.238.91' is <18126.28042.0> [rebalance:debug,2014-08-19T16:51:10.608,ns_1@10.242.238.88:<0.18685.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 891 is <0.18693.1> [ns_server:debug,2014-08-19T16:51:10.631,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,622578}, tap_estimate, {replica_building,"default",891,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31800.0>, <<"replication_building_891_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.644,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,635333}, tap_estimate, {replica_building,"default",891,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28042.0>, <<"replication_building_891_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:10.644,ns_1@10.242.238.88:<0.18694.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28042.0>}, {'ns_1@10.242.238.89',<18124.31800.0>}]) [rebalance:info,2014-08-19T16:51:10.644,ns_1@10.242.238.88:<0.18685.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:10.645,ns_1@10.242.238.88:<0.18685.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 891 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.646,ns_1@10.242.238.88:<0.18685.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.646,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.649,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.649,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18706.1>) 
[ns_server:debug,2014-08-19T16:51:10.649,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 635) [ns_server:debug,2014-08-19T16:51:10.650,ns_1@10.242.238.88:<0.18707.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.650,ns_1@10.242.238.88:<0.18707.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:10.650,ns_1@10.242.238.88:<0.18706.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 635 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.650,ns_1@10.242.238.88:<0.18712.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 635 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.650,ns_1@10.242.238.88:<0.18713.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 635 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.654,ns_1@10.242.238.88:<0.18714.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 635 into 'ns_1@10.242.238.89' is <18124.31819.0> [ns_server:debug,2014-08-19T16:51:10.656,ns_1@10.242.238.88:<0.18714.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 635 into 'ns_1@10.242.238.90' is <18125.26878.0> [rebalance:debug,2014-08-19T16:51:10.657,ns_1@10.242.238.88:<0.18706.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 635 is <0.18714.1> [views:debug,2014-08-19T16:51:10.676,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/21. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.676,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",21,active,0} [ns_server:debug,2014-08-19T16:51:10.681,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,672291}, tap_estimate, {replica_building,"default",635,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31819.0>, <<"replication_building_635_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.692,ns_1@10.242.238.88:<0.18715.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26878.0>}, {'ns_1@10.242.238.89',<18124.31819.0>}]) [rebalance:info,2014-08-19T16:51:10.692,ns_1@10.242.238.88:<0.18706.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:10.693,ns_1@10.242.238.88:<0.18706.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 635 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.693,ns_1@10.242.238.88:<0.18706.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.694,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.696,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,683157}, tap_estimate, {replica_building,"default",635,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26878.0>, <<"replication_building_635_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.697,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:10.697,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18727.1>) [ns_server:debug,2014-08-19T16:51:10.697,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 379) [ns_server:debug,2014-08-19T16:51:10.697,ns_1@10.242.238.88:<0.18728.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.698,ns_1@10.242.238.88:<0.18728.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:10.698,ns_1@10.242.238.88:<0.18727.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 379 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.698,ns_1@10.242.238.88:<0.18733.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 379 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.698,ns_1@10.242.238.88:<0.18734.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 379 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.702,ns_1@10.242.238.88:<0.18735.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 379 into 'ns_1@10.242.238.90' is <18125.26898.0> [ns_server:debug,2014-08-19T16:51:10.705,ns_1@10.242.238.88:<0.18735.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 379 into 'ns_1@10.242.238.89' is <18124.31824.0> [rebalance:debug,2014-08-19T16:51:10.705,ns_1@10.242.238.88:<0.18727.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 379 is <0.18735.1> [ns_server:debug,2014-08-19T16:51:10.729,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,720574}, tap_estimate, {replica_building,"default",379,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26898.0>, <<"replication_building_379_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.740,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,731127}, tap_estimate, {replica_building,"default",379,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31824.0>, <<"replication_building_379_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.740,ns_1@10.242.238.88:<0.18736.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31824.0>}, {'ns_1@10.242.238.90',<18125.26898.0>}]) [rebalance:info,2014-08-19T16:51:10.740,ns_1@10.242.238.88:<0.18727.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:10.741,ns_1@10.242.238.88:<0.18727.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 379 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.741,ns_1@10.242.238.88:<0.18727.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.742,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:10.745,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.745,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18748.1>) 
[ns_server:debug,2014-08-19T16:51:10.745,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 890) [ns_server:debug,2014-08-19T16:51:10.745,ns_1@10.242.238.88:<0.18749.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.745,ns_1@10.242.238.88:<0.18749.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:10.746,ns_1@10.242.238.88:<0.18748.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 890 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.746,ns_1@10.242.238.88:<0.18754.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 890 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.746,ns_1@10.242.238.88:<0.18755.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 890 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.750,ns_1@10.242.238.88:<0.18756.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 890 into 'ns_1@10.242.238.89' is <18124.31830.0> [ns_server:debug,2014-08-19T16:51:10.753,ns_1@10.242.238.88:<0.18756.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 890 into 'ns_1@10.242.238.91' is <18126.28062.0> [rebalance:debug,2014-08-19T16:51:10.753,ns_1@10.242.238.88:<0.18748.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 890 is <0.18756.1> [ns_server:debug,2014-08-19T16:51:10.776,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,767603}, tap_estimate, {replica_building,"default",890,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31830.0>, <<"replication_building_890_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.789,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,780456}, tap_estimate, {replica_building,"default",890,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28062.0>, <<"replication_building_890_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:10.789,ns_1@10.242.238.88:<0.18757.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28062.0>}, {'ns_1@10.242.238.89',<18124.31830.0>}]) [rebalance:info,2014-08-19T16:51:10.790,ns_1@10.242.238.88:<0.18748.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:10.790,ns_1@10.242.238.88:<0.18748.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 890 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.791,ns_1@10.242.238.88:<0.18748.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.791,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} 
[ns_server:debug,2014-08-19T16:51:10.794,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.794,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18783.1>) [ns_server:debug,2014-08-19T16:51:10.794,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 634) [ns_server:debug,2014-08-19T16:51:10.795,ns_1@10.242.238.88:<0.18784.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.795,ns_1@10.242.238.88:<0.18784.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:10.795,ns_1@10.242.238.88:<0.18783.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 634 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.795,ns_1@10.242.238.88:<0.18789.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 634 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.795,ns_1@10.242.238.88:<0.18790.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 634 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.799,ns_1@10.242.238.88:<0.18791.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 634 into 'ns_1@10.242.238.89' is <18124.31849.0> [ns_server:debug,2014-08-19T16:51:10.802,ns_1@10.242.238.88:<0.18791.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 634 into 'ns_1@10.242.238.90' is <18125.26917.0> [rebalance:debug,2014-08-19T16:51:10.802,ns_1@10.242.238.88:<0.18783.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 634 is <0.18791.1> [ns_server:debug,2014-08-19T16:51:10.827,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,818731}, tap_estimate, {replica_building,"default",634,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31849.0>, <<"replication_building_634_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.836,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,827663}, tap_estimate, {replica_building,"default",634,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26917.0>, <<"replication_building_634_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.837,ns_1@10.242.238.88:<0.18792.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26917.0>}, {'ns_1@10.242.238.89',<18124.31849.0>}]) [rebalance:info,2014-08-19T16:51:10.837,ns_1@10.242.238.88:<0.18783.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:10.838,ns_1@10.242.238.88:<0.18783.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 634 on 
ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.838,ns_1@10.242.238.88:<0.18783.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.839,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.842,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:10.842,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18804.1>) [ns_server:debug,2014-08-19T16:51:10.842,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 378) [ns_server:debug,2014-08-19T16:51:10.842,ns_1@10.242.238.88:<0.18805.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.843,ns_1@10.242.238.88:<0.18805.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:10.843,ns_1@10.242.238.88:<0.18804.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 378 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.843,ns_1@10.242.238.88:<0.18810.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 378 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.843,ns_1@10.242.238.88:<0.18811.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 378 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.847,ns_1@10.242.238.88:<0.18812.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 378 into 'ns_1@10.242.238.90' is <18125.26923.0> [ns_server:debug,2014-08-19T16:51:10.849,ns_1@10.242.238.88:<0.18812.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 378 into 'ns_1@10.242.238.89' is <18124.31854.0> [rebalance:debug,2014-08-19T16:51:10.849,ns_1@10.242.238.88:<0.18804.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 378 is <0.18812.1> [ns_server:debug,2014-08-19T16:51:10.851,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 19. Nacking mccouch update. [views:debug,2014-08-19T16:51:10.851,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/19. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.852,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",19,active,0} [ns_server:debug,2014-08-19T16:51:10.854,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,873,562,507,196,796,741, 430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923,612, 301,246,846,535,480,169,769,714,403,948,637,326,871,560,505,194,794,739,428, 117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921,610,299, 244,844,533,478,167,712,401,1023,946,635,324,869,558,503,192,920,792,737,609, 426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022, 945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608, 425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021, 944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942, 814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422,294, 239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941,813, 758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421,293, 238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940,812, 757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292,237, 109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811,756, 628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291,236, 108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938,810,755, 627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235,107, 963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754,626, 443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234,962, 834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753,625,442, 314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961,833, 650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441,313, 130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832,649, 521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440,312,129, 985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648,520, 465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51,362, 907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489,178, 778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49,360, 905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487,176, 
99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47,358, 903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485,174, 97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45,356, 901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483,172, 95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43,354, 899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481,170, 93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41,352, 897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479,168, 91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39,350, 895,584,273,218,818,763,452,141,997,686,375] [ns_server:debug,2014-08-19T16:51:10.873,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,864851}, tap_estimate, {replica_building,"default",378,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26923.0>, <<"replication_building_378_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.894,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,885038}, tap_estimate, {replica_building,"default",378,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31854.0>, <<"replication_building_378_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.894,ns_1@10.242.238.88:<0.18813.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31854.0>}, {'ns_1@10.242.238.90',<18125.26923.0>}]) [rebalance:info,2014-08-19T16:51:10.894,ns_1@10.242.238.88:<0.18804.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:10.895,ns_1@10.242.238.88:<0.18804.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 378 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.895,ns_1@10.242.238.88:<0.18804.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.896,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:10.899,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:10.899,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18825.1>) [ns_server:debug,2014-08-19T16:51:10.899,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 889) [ns_server:debug,2014-08-19T16:51:10.899,ns_1@10.242.238.88:<0.18826.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.899,ns_1@10.242.238.88:<0.18826.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:10.900,ns_1@10.242.238.88:<0.18825.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 889 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.900,ns_1@10.242.238.88:<0.18831.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 889 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.900,ns_1@10.242.238.88:<0.18832.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 889 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.904,ns_1@10.242.238.88:<0.18833.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 889 into 'ns_1@10.242.238.89' is <18124.31860.0> [ns_server:debug,2014-08-19T16:51:10.906,ns_1@10.242.238.88:<0.18833.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 889 into 'ns_1@10.242.238.91' is <18126.28083.0> [rebalance:debug,2014-08-19T16:51:10.907,ns_1@10.242.238.88:<0.18825.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 889 is <0.18833.1> [views:debug,2014-08-19T16:51:10.910,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/19. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:10.911,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",19,active,0} [ns_server:debug,2014-08-19T16:51:10.930,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,921503}, tap_estimate, {replica_building,"default",889,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31860.0>, <<"replication_building_889_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.944,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,935463}, tap_estimate, {replica_building,"default",889,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28083.0>, <<"replication_building_889_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:10.945,ns_1@10.242.238.88:<0.18834.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28083.0>}, {'ns_1@10.242.238.89',<18124.31860.0>}]) [rebalance:info,2014-08-19T16:51:10.945,ns_1@10.242.238.88:<0.18825.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:10.945,ns_1@10.242.238.88:<0.18825.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 889 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.946,ns_1@10.242.238.88:<0.18825.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:10.946,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.949,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:51:10.949,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18846.1>) [ns_server:debug,2014-08-19T16:51:10.949,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 633) [ns_server:debug,2014-08-19T16:51:10.950,ns_1@10.242.238.88:<0.18847.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.950,ns_1@10.242.238.88:<0.18847.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:10.950,ns_1@10.242.238.88:<0.18846.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 633 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.950,ns_1@10.242.238.88:<0.18852.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 633 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.950,ns_1@10.242.238.88:<0.18853.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 633 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:10.954,ns_1@10.242.238.88:<0.18854.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 633 into 'ns_1@10.242.238.89' is <18124.31865.0> [ns_server:debug,2014-08-19T16:51:10.957,ns_1@10.242.238.88:<0.18854.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 633 into 'ns_1@10.242.238.90' is <18125.26942.0> [rebalance:debug,2014-08-19T16:51:10.957,ns_1@10.242.238.88:<0.18846.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 633 is <0.18854.1> [ns_server:debug,2014-08-19T16:51:10.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,972035}, tap_estimate, {replica_building,"default",633,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31865.0>, <<"replication_building_633_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:10.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452670,982852}, tap_estimate, {replica_building,"default",633,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26942.0>, <<"replication_building_633_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:10.992,ns_1@10.242.238.88:<0.18855.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26942.0>}, {'ns_1@10.242.238.89',<18124.31865.0>}]) [rebalance:info,2014-08-19T16:51:10.992,ns_1@10.242.238.88:<0.18846.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:10.993,ns_1@10.242.238.88:<0.18846.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 633 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:10.993,ns_1@10.242.238.88:<0.18846.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:51:10.994,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:10.997,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:10.997,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18881.1>) [ns_server:debug,2014-08-19T16:51:10.997,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 377) [ns_server:debug,2014-08-19T16:51:10.997,ns_1@10.242.238.88:<0.18882.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:10.997,ns_1@10.242.238.88:<0.18882.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:10.997,ns_1@10.242.238.88:<0.18881.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 377 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:10.998,ns_1@10.242.238.88:<0.18887.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 377 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:10.998,ns_1@10.242.238.88:<0.18888.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 377 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.001,ns_1@10.242.238.88:<0.18889.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 377 into 'ns_1@10.242.238.90' is <18125.26948.0> [ns_server:debug,2014-08-19T16:51:11.004,ns_1@10.242.238.88:<0.18889.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 377 into 'ns_1@10.242.238.89' is <18124.31884.0> [rebalance:debug,2014-08-19T16:51:11.004,ns_1@10.242.238.88:<0.18881.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 377 is <0.18889.1> [ns_server:debug,2014-08-19T16:51:11.028,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,18984}, tap_estimate, {replica_building,"default",377,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26948.0>, <<"replication_building_377_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.039,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,30013}, tap_estimate, {replica_building,"default",377,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31884.0>, <<"replication_building_377_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.039,ns_1@10.242.238.88:<0.18890.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31884.0>}, {'ns_1@10.242.238.90',<18125.26948.0>}]) [rebalance:info,2014-08-19T16:51:11.039,ns_1@10.242.238.88:<0.18881.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:11.040,ns_1@10.242.238.88:<0.18881.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 377 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.040,ns_1@10.242.238.88:<0.18881.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.041,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:11.043,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.044,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18902.1>) [ns_server:debug,2014-08-19T16:51:11.044,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 888) [ns_server:debug,2014-08-19T16:51:11.044,ns_1@10.242.238.88:<0.18903.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.044,ns_1@10.242.238.88:<0.18903.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:11.044,ns_1@10.242.238.88:<0.18902.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 888 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.045,ns_1@10.242.238.88:<0.18908.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 888 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.045,ns_1@10.242.238.88:<0.18909.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 888 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.048,ns_1@10.242.238.88:<0.18910.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 888 into 'ns_1@10.242.238.89' is <18124.31890.0> [ns_server:debug,2014-08-19T16:51:11.051,ns_1@10.242.238.88:<0.18910.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 888 into 'ns_1@10.242.238.91' is <18126.28089.0> [rebalance:debug,2014-08-19T16:51:11.051,ns_1@10.242.238.88:<0.18902.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 888 is <0.18910.1> [ns_server:debug,2014-08-19T16:51:11.060,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 17. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.061,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/17. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.061,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",17,active,0} [ns_server:debug,2014-08-19T16:51:11.063,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,871,560,505,194,794,739, 428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921,610, 299,244,844,533,478,167,712,401,1023,946,635,324,869,558,503,192,920,792,737, 609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217, 1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736, 608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216, 1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735,607, 424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020, 943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606, 423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019, 942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940, 812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811, 756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291, 236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938,810, 755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235, 107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754, 626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234, 962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753,625, 442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961, 833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 
176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43, 354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481, 170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41, 352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479, 168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39, 350,895,584,273,218,818,763,452,141,997,686,375] [ns_server:debug,2014-08-19T16:51:11.079,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,68232}, tap_estimate, {replica_building,"default",888,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31890.0>, <<"replication_building_888_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.087,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,78610}, tap_estimate, {replica_building,"default",888,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28089.0>, <<"replication_building_888_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:11.088,ns_1@10.242.238.88:<0.18911.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28089.0>}, {'ns_1@10.242.238.89',<18124.31890.0>}]) [rebalance:info,2014-08-19T16:51:11.088,ns_1@10.242.238.88:<0.18902.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:11.088,ns_1@10.242.238.88:<0.18902.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 888 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.089,ns_1@10.242.238.88:<0.18902.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.089,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.092,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.092,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.18923.1>) [ns_server:debug,2014-08-19T16:51:11.092,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 632) [ns_server:debug,2014-08-19T16:51:11.093,ns_1@10.242.238.88:<0.18924.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.093,ns_1@10.242.238.88:<0.18924.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:11.093,ns_1@10.242.238.88:<0.18923.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 632 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.093,ns_1@10.242.238.88:<0.18929.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 632 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.093,ns_1@10.242.238.88:<0.18930.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 632 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.097,ns_1@10.242.238.88:<0.18931.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 632 into 'ns_1@10.242.238.89' is <18124.31895.0> [ns_server:debug,2014-08-19T16:51:11.099,ns_1@10.242.238.88:<0.18931.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 632 into 'ns_1@10.242.238.90' is <18125.26953.0> [rebalance:debug,2014-08-19T16:51:11.099,ns_1@10.242.238.88:<0.18923.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 632 is <0.18931.1> [views:debug,2014-08-19T16:51:11.119,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/17. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",17,active,0} [ns_server:debug,2014-08-19T16:51:11.124,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,114962}, tap_estimate, {replica_building,"default",632,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31895.0>, <<"replication_building_632_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.135,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,126033}, tap_estimate, {replica_building,"default",632,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26953.0>, <<"replication_building_632_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.135,ns_1@10.242.238.88:<0.18932.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26953.0>}, {'ns_1@10.242.238.89',<18124.31895.0>}]) [rebalance:info,2014-08-19T16:51:11.135,ns_1@10.242.238.88:<0.18923.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:11.136,ns_1@10.242.238.88:<0.18923.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 632 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.136,ns_1@10.242.238.88:<0.18923.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.137,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.139,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] 
[rebalance:debug,2014-08-19T16:51:11.140,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.18944.1>) [ns_server:debug,2014-08-19T16:51:11.140,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 376) [ns_server:debug,2014-08-19T16:51:11.140,ns_1@10.242.238.88:<0.18945.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.140,ns_1@10.242.238.88:<0.18945.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:11.140,ns_1@10.242.238.88:<0.18944.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 376 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.141,ns_1@10.242.238.88:<0.18950.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 376 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.141,ns_1@10.242.238.88:<0.18951.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 376 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.144,ns_1@10.242.238.88:<0.18952.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 376 into 'ns_1@10.242.238.90' is <18125.26973.0> [ns_server:debug,2014-08-19T16:51:11.147,ns_1@10.242.238.88:<0.18952.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 376 into 'ns_1@10.242.238.89' is <18124.31900.0> [rebalance:debug,2014-08-19T16:51:11.147,ns_1@10.242.238.88:<0.18944.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 376 is <0.18952.1> [ns_server:debug,2014-08-19T16:51:11.170,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,161908}, tap_estimate, {replica_building,"default",376,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26973.0>, <<"replication_building_376_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.182,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,173369}, tap_estimate, {replica_building,"default",376,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31900.0>, <<"replication_building_376_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.182,ns_1@10.242.238.88:<0.18953.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31900.0>}, {'ns_1@10.242.238.90',<18125.26973.0>}]) [rebalance:info,2014-08-19T16:51:11.183,ns_1@10.242.238.88:<0.18944.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:11.183,ns_1@10.242.238.88:<0.18944.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 376 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.184,ns_1@10.242.238.88:<0.18944.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas 
[ns_server:debug,2014-08-19T16:51:11.184,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:11.187,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.187,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.18970.1>) [ns_server:debug,2014-08-19T16:51:11.187,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 887) [ns_server:debug,2014-08-19T16:51:11.188,ns_1@10.242.238.88:<0.18971.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.188,ns_1@10.242.238.88:<0.18971.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:11.188,ns_1@10.242.238.88:<0.18970.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 887 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.188,ns_1@10.242.238.88:<0.18976.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 887 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.188,ns_1@10.242.238.88:<0.18977.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 887 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.192,ns_1@10.242.238.88:<0.18978.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 887 into 'ns_1@10.242.238.89' is <18124.31906.0> [ns_server:debug,2014-08-19T16:51:11.195,ns_1@10.242.238.88:<0.18978.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 887 into 'ns_1@10.242.238.91' is <18126.28109.0> [rebalance:debug,2014-08-19T16:51:11.195,ns_1@10.242.238.88:<0.18970.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 887 is <0.18978.1> [ns_server:debug,2014-08-19T16:51:11.221,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,212661}, tap_estimate, {replica_building,"default",887,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31906.0>, <<"replication_building_887_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.230,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,221894}, tap_estimate, {replica_building,"default",887,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28109.0>, <<"replication_building_887_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:11.231,ns_1@10.242.238.88:<0.18979.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28109.0>}, {'ns_1@10.242.238.89',<18124.31906.0>}]) [rebalance:info,2014-08-19T16:51:11.231,ns_1@10.242.238.88:<0.18970.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for 
ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:11.232,ns_1@10.242.238.88:<0.18970.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 887 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.232,ns_1@10.242.238.88:<0.18970.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.233,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.236,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.236,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19005.1>) [ns_server:debug,2014-08-19T16:51:11.236,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 631) [ns_server:debug,2014-08-19T16:51:11.236,ns_1@10.242.238.88:<0.19006.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.236,ns_1@10.242.238.88:<0.19006.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:11.237,ns_1@10.242.238.88:<0.19005.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 631 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.237,ns_1@10.242.238.88:<0.19011.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 631 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.237,ns_1@10.242.238.88:<0.19012.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 631 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.240,ns_1@10.242.238.88:<0.19013.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 631 into 'ns_1@10.242.238.89' is <18124.31925.0> [ns_server:debug,2014-08-19T16:51:11.243,ns_1@10.242.238.88:<0.19013.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 631 into 'ns_1@10.242.238.90' is <18125.26978.0> [rebalance:debug,2014-08-19T16:51:11.243,ns_1@10.242.238.88:<0.19005.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 631 is <0.19013.1> [ns_server:debug,2014-08-19T16:51:11.267,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,258234}, tap_estimate, {replica_building,"default",631,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31925.0>, <<"replication_building_631_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.277,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 15. Nacking mccouch update. 
[views:debug,2014-08-19T16:51:11.277,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/15. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.278,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",15,active,0} [ns_server:debug,2014-08-19T16:51:11.278,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,269161}, tap_estimate, {replica_building,"default",631,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26978.0>, <<"replication_building_631_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.278,ns_1@10.242.238.88:<0.19014.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.26978.0>}, {'ns_1@10.242.238.89',<18124.31925.0>}]) [rebalance:info,2014-08-19T16:51:11.278,ns_1@10.242.238.88:<0.19005.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:11.279,ns_1@10.242.238.88:<0.19005.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 631 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.280,ns_1@10.242.238.88:<0.19005.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.280,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.281,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,869,558,503,192,920,792, 737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400,272, 217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791, 736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271, 216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790,735, 607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215, 1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734, 606,423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214, 1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605, 422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018, 
941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604, 421,293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017, 940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939, 811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419, 291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938, 810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290, 235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809, 754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289, 234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440, 312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831, 648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673, 51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671, 49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542, 487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669, 47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540, 485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667, 45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538, 483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665, 43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536, 481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663, 41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534, 479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661, 39,350,895,584,273,218,818,763,452,141,997,686,375] [ns_server:debug,2014-08-19T16:51:11.283,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:11.283,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19026.1>) [ns_server:debug,2014-08-19T16:51:11.283,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 375) [ns_server:debug,2014-08-19T16:51:11.284,ns_1@10.242.238.88:<0.19027.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.284,ns_1@10.242.238.88:<0.19027.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:11.284,ns_1@10.242.238.88:<0.19026.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 375 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.284,ns_1@10.242.238.88:<0.19032.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 375 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.284,ns_1@10.242.238.88:<0.19033.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 375 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.288,ns_1@10.242.238.88:<0.19034.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 375 into 'ns_1@10.242.238.90' is <18125.26998.0> [ns_server:debug,2014-08-19T16:51:11.290,ns_1@10.242.238.88:<0.19034.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 375 into 'ns_1@10.242.238.89' is <18124.31930.0> [rebalance:debug,2014-08-19T16:51:11.290,ns_1@10.242.238.88:<0.19026.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 375 is <0.19034.1> [ns_server:debug,2014-08-19T16:51:11.314,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,305907}, tap_estimate, {replica_building,"default",375,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.26998.0>, <<"replication_building_375_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.326,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,317615}, tap_estimate, {replica_building,"default",375,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31930.0>, <<"replication_building_375_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.327,ns_1@10.242.238.88:<0.19035.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31930.0>}, {'ns_1@10.242.238.90',<18125.26998.0>}]) [rebalance:info,2014-08-19T16:51:11.327,ns_1@10.242.238.88:<0.19026.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:11.327,ns_1@10.242.238.88:<0.19026.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 375 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.328,ns_1@10.242.238.88:<0.19026.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:11.328,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/15. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",15,active,0} [ns_server:debug,2014-08-19T16:51:11.328,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:11.331,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.331,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19047.1>) [ns_server:debug,2014-08-19T16:51:11.331,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 886) [ns_server:debug,2014-08-19T16:51:11.332,ns_1@10.242.238.88:<0.19048.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.332,ns_1@10.242.238.88:<0.19048.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:11.332,ns_1@10.242.238.88:<0.19047.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 886 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.332,ns_1@10.242.238.88:<0.19053.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 886 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.332,ns_1@10.242.238.88:<0.19054.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 886 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.336,ns_1@10.242.238.88:<0.19055.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 886 into 'ns_1@10.242.238.89' is <18124.31936.0> [ns_server:debug,2014-08-19T16:51:11.339,ns_1@10.242.238.88:<0.19055.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 886 into 'ns_1@10.242.238.91' is <18126.28115.0> [rebalance:debug,2014-08-19T16:51:11.339,ns_1@10.242.238.88:<0.19047.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 886 is <0.19055.1> [ns_server:debug,2014-08-19T16:51:11.364,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,355281}, tap_estimate, {replica_building,"default",886,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31936.0>, <<"replication_building_886_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.374,ns_1@10.242.238.88:<0.19056.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28115.0>}, {'ns_1@10.242.238.89',<18124.31936.0>}]) [ns_server:debug,2014-08-19T16:51:11.373,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,364531}, tap_estimate, {replica_building,"default",886,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28115.0>, 
<<"replication_building_886_'ns_1@10.242.238.91'">>} [rebalance:info,2014-08-19T16:51:11.374,ns_1@10.242.238.88:<0.19047.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:11.375,ns_1@10.242.238.88:<0.19047.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 886 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.375,ns_1@10.242.238.88:<0.19047.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.376,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.378,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.379,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19090.1>) [ns_server:debug,2014-08-19T16:51:11.379,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 630) [ns_server:debug,2014-08-19T16:51:11.379,ns_1@10.242.238.88:<0.19091.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.379,ns_1@10.242.238.88:<0.19091.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:11.379,ns_1@10.242.238.88:<0.19090.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 630 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.380,ns_1@10.242.238.88:<0.19096.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 630 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.380,ns_1@10.242.238.88:<0.19097.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 630 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.383,ns_1@10.242.238.88:<0.19098.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 630 into 'ns_1@10.242.238.89' is <18124.31941.0> [ns_server:debug,2014-08-19T16:51:11.386,ns_1@10.242.238.88:<0.19098.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 630 into 'ns_1@10.242.238.90' is <18125.27003.0> [rebalance:debug,2014-08-19T16:51:11.386,ns_1@10.242.238.88:<0.19090.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 630 is <0.19098.1> [ns_server:debug,2014-08-19T16:51:11.411,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,402088}, tap_estimate, {replica_building,"default",630,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31941.0>, <<"replication_building_630_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 13. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.420,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/13. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",13,active,0} [ns_server:debug,2014-08-19T16:51:11.421,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,412435}, tap_estimate, {replica_building,"default",630,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27003.0>, <<"replication_building_630_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.422,ns_1@10.242.238.88:<0.19099.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27003.0>}, {'ns_1@10.242.238.89',<18124.31941.0>}]) [rebalance:info,2014-08-19T16:51:11.422,ns_1@10.242.238.88:<0.19090.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:11.422,ns_1@10.242.238.88:<0.19090.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 630 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:51:11.422,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,13,869,558,503,192,920, 792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,995,867,684,556,501,373,190,918,790, 735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270, 215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789, 734,606,423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269, 214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733, 605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213, 1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732, 604,421,293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212, 1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603, 420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016, 939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602, 419,291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015, 938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 
290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959, 831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984, 673,51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855, 544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982, 671,49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853, 542,487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980, 669,47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851, 540,485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978, 667,45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849, 538,483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976, 665,43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847, 536,481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974, 663,41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845, 534,479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972, 661,39,350,895,584,273,218,818,763,452,141,997,686,375] [rebalance:info,2014-08-19T16:51:11.423,ns_1@10.242.238.88:<0.19090.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.424,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.426,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:11.427,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19111.1>) [ns_server:debug,2014-08-19T16:51:11.427,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 374) [ns_server:debug,2014-08-19T16:51:11.427,ns_1@10.242.238.88:<0.19112.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.427,ns_1@10.242.238.88:<0.19112.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:11.427,ns_1@10.242.238.88:<0.19111.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 374 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.428,ns_1@10.242.238.88:<0.19117.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 374 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.428,ns_1@10.242.238.88:<0.19118.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 374 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.431,ns_1@10.242.238.88:<0.19119.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 374 into 'ns_1@10.242.238.90' is <18125.27023.0> [ns_server:debug,2014-08-19T16:51:11.434,ns_1@10.242.238.88:<0.19119.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 374 into 'ns_1@10.242.238.89' is <18124.31960.0> [rebalance:debug,2014-08-19T16:51:11.434,ns_1@10.242.238.88:<0.19111.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 374 is <0.19119.1> [ns_server:debug,2014-08-19T16:51:11.457,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,448827}, tap_estimate, {replica_building,"default",374,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27023.0>, <<"replication_building_374_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.471,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,462130}, tap_estimate, {replica_building,"default",374,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31960.0>, <<"replication_building_374_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.471,ns_1@10.242.238.88:<0.19122.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31960.0>}, {'ns_1@10.242.238.90',<18125.27023.0>}]) [rebalance:info,2014-08-19T16:51:11.471,ns_1@10.242.238.88:<0.19111.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:11.472,ns_1@10.242.238.88:<0.19111.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 374 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.472,ns_1@10.242.238.88:<0.19111.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.473,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:11.476,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.476,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19135.1>) 
[ns_server:debug,2014-08-19T16:51:11.476,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 885) [ns_server:debug,2014-08-19T16:51:11.477,ns_1@10.242.238.88:<0.19136.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.477,ns_1@10.242.238.88:<0.19136.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:11.477,ns_1@10.242.238.88:<0.19135.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 885 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.477,ns_1@10.242.238.88:<0.19141.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 885 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.477,ns_1@10.242.238.88:<0.19142.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 885 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.481,ns_1@10.242.238.88:<0.19143.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 885 into 'ns_1@10.242.238.89' is <18124.31966.0> [ns_server:debug,2014-08-19T16:51:11.484,ns_1@10.242.238.88:<0.19143.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 885 into 'ns_1@10.242.238.91' is <18126.28135.0> [rebalance:debug,2014-08-19T16:51:11.484,ns_1@10.242.238.88:<0.19135.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 885 is <0.19143.1> [views:debug,2014-08-19T16:51:11.487,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/13. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.488,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",13,active,0} [ns_server:debug,2014-08-19T16:51:11.507,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,498474}, tap_estimate, {replica_building,"default",885,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31966.0>, <<"replication_building_885_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.519,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,510129}, tap_estimate, {replica_building,"default",885,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28135.0>, <<"replication_building_885_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:11.519,ns_1@10.242.238.88:<0.19144.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28135.0>}, {'ns_1@10.242.238.89',<18124.31966.0>}]) [rebalance:info,2014-08-19T16:51:11.519,ns_1@10.242.238.88:<0.19135.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:11.520,ns_1@10.242.238.88:<0.19135.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 885 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.521,ns_1@10.242.238.88:<0.19135.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.521,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.524,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.524,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19161.1>) [ns_server:debug,2014-08-19T16:51:11.524,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 629) [ns_server:debug,2014-08-19T16:51:11.525,ns_1@10.242.238.88:<0.19162.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.525,ns_1@10.242.238.88:<0.19162.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:11.525,ns_1@10.242.238.88:<0.19161.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 629 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.525,ns_1@10.242.238.88:<0.19170.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 629 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.525,ns_1@10.242.238.88:<0.19171.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 629 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.530,ns_1@10.242.238.88:<0.19172.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 629 into 'ns_1@10.242.238.89' is <18124.31971.0> [ns_server:debug,2014-08-19T16:51:11.532,ns_1@10.242.238.88:<0.19172.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 629 into 'ns_1@10.242.238.90' is <18125.27034.0> [rebalance:debug,2014-08-19T16:51:11.532,ns_1@10.242.238.88:<0.19161.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 629 is <0.19172.1> [ns_server:debug,2014-08-19T16:51:11.557,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,548305}, tap_estimate, {replica_building,"default",629,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31971.0>, <<"replication_building_629_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.568,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,559473}, tap_estimate, {replica_building,"default",629,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27034.0>, <<"replication_building_629_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.569,ns_1@10.242.238.88:<0.19179.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27034.0>}, {'ns_1@10.242.238.89',<18124.31971.0>}]) [rebalance:info,2014-08-19T16:51:11.569,ns_1@10.242.238.88:<0.19161.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:11.570,ns_1@10.242.238.88:<0.19161.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 629 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.570,ns_1@10.242.238.88:<0.19161.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.571,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.573,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:11.573,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19191.1>) 
[ns_server:debug,2014-08-19T16:51:11.574,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 373) [ns_server:debug,2014-08-19T16:51:11.574,ns_1@10.242.238.88:<0.19192.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.574,ns_1@10.242.238.88:<0.19192.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:11.574,ns_1@10.242.238.88:<0.19191.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 373 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.574,ns_1@10.242.238.88:<0.19197.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 373 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.574,ns_1@10.242.238.88:<0.19198.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 373 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.578,ns_1@10.242.238.88:<0.19199.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 373 into 'ns_1@10.242.238.90' is <18125.27054.0> [ns_server:debug,2014-08-19T16:51:11.579,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 11. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.579,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/11. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.579,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",11,active,0} [ns_server:debug,2014-08-19T16:51:11.581,ns_1@10.242.238.88:<0.19199.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 373 into 'ns_1@10.242.238.89' is <18124.31990.0> [rebalance:debug,2014-08-19T16:51:11.581,ns_1@10.242.238.88:<0.19191.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 373 is <0.19199.1> [ns_server:debug,2014-08-19T16:51:11.581,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,13,869,558,503,192,920, 
792,737,609,426,298,243,115,971,843,660,532,477,349,166,894,89,711,583,400, 272,217,1022,945,817,762,634,451,323,140,996,868,685,63,557,502,374,191,919, 791,736,608,425,297,242,114,970,842,659,531,476,37,348,165,893,710,582,399, 271,216,1021,944,816,761,633,450,322,139,11,995,867,684,556,501,373,190,918, 790,735,607,424,296,241,113,969,841,658,530,475,347,164,892,87,709,581,398, 270,215,1020,943,815,760,632,449,321,138,994,866,683,61,555,500,372,189,917, 789,734,606,423,295,240,112,968,840,657,529,474,35,346,163,891,708,580,397, 269,214,1019,942,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788, 733,605,422,294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268, 213,1018,941,813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787, 732,604,421,293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267, 212,1017,940,812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731, 603,420,292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211, 1016,939,811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730, 602,419,291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210, 1015,938,810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601, 418,290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014, 937,809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600, 417,289,234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936, 808,753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103, 959,831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128, 984,673,51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255, 855,544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126, 982,671,49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253, 853,542,487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124, 980,669,47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251, 851,540,485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122, 978,667,45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249, 849,538,483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120, 976,665,43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247, 847,536,481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118, 974,663,41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245, 845,534,479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116, 972,661,39,350,895,584,273,218,818,763,452,141,997,686,375] [ns_server:debug,2014-08-19T16:51:11.606,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,597529}, tap_estimate, {replica_building,"default",373,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27054.0>, <<"replication_building_373_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.619,ns_1@10.242.238.88:<0.19200.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.31990.0>}, {'ns_1@10.242.238.90',<18125.27054.0>}]) 
[rebalance:info,2014-08-19T16:51:11.619,ns_1@10.242.238.88:<0.19191.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:11.619,ns_1@10.242.238.88:<0.19191.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 373 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.620,ns_1@10.242.238.88:<0.19191.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.620,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:11.624,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.624,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19213.1>) [ns_server:debug,2014-08-19T16:51:11.624,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,609715}, tap_estimate, {replica_building,"default",373,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31990.0>, <<"replication_building_373_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.624,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 884) [ns_server:debug,2014-08-19T16:51:11.624,ns_1@10.242.238.88:<0.19214.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.624,ns_1@10.242.238.88:<0.19214.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:11.625,ns_1@10.242.238.88:<0.19213.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 884 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.625,ns_1@10.242.238.88:<0.19219.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 884 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.625,ns_1@10.242.238.88:<0.19220.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 884 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [views:debug,2014-08-19T16:51:11.630,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/11. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.630,ns_1@10.242.238.88:<0.19221.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 884 into 'ns_1@10.242.238.89' is <18124.31996.0> [ns_server:debug,2014-08-19T16:51:11.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",11,active,0} [ns_server:debug,2014-08-19T16:51:11.633,ns_1@10.242.238.88:<0.19221.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 884 into 'ns_1@10.242.238.91' is <18126.28166.0> [rebalance:debug,2014-08-19T16:51:11.633,ns_1@10.242.238.88:<0.19213.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 884 is <0.19221.1> [ns_server:debug,2014-08-19T16:51:11.658,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,648994}, tap_estimate, {replica_building,"default",884,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.31996.0>, <<"replication_building_884_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.671,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,662953}, tap_estimate, {replica_building,"default",884,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28166.0>, <<"replication_building_884_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:11.672,ns_1@10.242.238.88:<0.19222.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28166.0>}, {'ns_1@10.242.238.89',<18124.31996.0>}]) [rebalance:info,2014-08-19T16:51:11.672,ns_1@10.242.238.88:<0.19213.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:11.673,ns_1@10.242.238.88:<0.19213.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 884 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.673,ns_1@10.242.238.88:<0.19213.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.674,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.677,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.677,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19248.1>) [ns_server:debug,2014-08-19T16:51:11.677,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 628) [ns_server:debug,2014-08-19T16:51:11.678,ns_1@10.242.238.88:<0.19249.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.678,ns_1@10.242.238.88:<0.19249.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:11.678,ns_1@10.242.238.88:<0.19248.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 628 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.678,ns_1@10.242.238.88:<0.19254.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 628 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.678,ns_1@10.242.238.88:<0.19255.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 628 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.682,ns_1@10.242.238.88:<0.19256.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 628 into 'ns_1@10.242.238.89' is <18124.32001.0> [ns_server:debug,2014-08-19T16:51:11.685,ns_1@10.242.238.88:<0.19256.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 628 into 'ns_1@10.242.238.90' is <18125.27073.0> [rebalance:debug,2014-08-19T16:51:11.685,ns_1@10.242.238.88:<0.19248.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 628 is <0.19256.1> [ns_server:debug,2014-08-19T16:51:11.708,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,699612}, tap_estimate, {replica_building,"default",628,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32001.0>, <<"replication_building_628_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.721,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 9. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.721,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/9. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.722,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",9,active,0} [ns_server:debug,2014-08-19T16:51:11.722,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,713081}, tap_estimate, {replica_building,"default",628,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27073.0>, <<"replication_building_628_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.722,ns_1@10.242.238.88:<0.19257.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27073.0>}, {'ns_1@10.242.238.89',<18124.32001.0>}]) [rebalance:info,2014-08-19T16:51:11.722,ns_1@10.242.238.88:<0.19248.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:11.723,ns_1@10.242.238.88:<0.19248.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 628 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.724,ns_1@10.242.238.88:<0.19248.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.724,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.724,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,13,869,558,503,192,792, 737,426,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608,425, 297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021,944, 816,761,633,450,322,139,11,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942, 9,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940, 
812,757,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420,292, 237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939,811, 756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419,291, 236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938,810, 755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290,235, 107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809,754, 626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289,234, 962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753,625, 442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105,961, 833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624,441, 313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960,832, 649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440,312, 129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831,648, 520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673,51, 362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544,489, 178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671,49, 360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542,487, 176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669,47, 358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540,485, 174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667,45, 356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538,483, 172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665,43, 354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536,481, 170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663,41, 352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534,479, 168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661,39, 350,895,584,273,218,818,763,452,141,997,686,375,920,609,298,243] [ns_server:debug,2014-08-19T16:51:11.727,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:11.727,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19269.1>) [ns_server:debug,2014-08-19T16:51:11.727,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 372) [ns_server:debug,2014-08-19T16:51:11.728,ns_1@10.242.238.88:<0.19270.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.728,ns_1@10.242.238.88:<0.19270.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:11.728,ns_1@10.242.238.88:<0.19269.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 372 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.728,ns_1@10.242.238.88:<0.19275.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 372 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.728,ns_1@10.242.238.88:<0.19276.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 372 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.732,ns_1@10.242.238.88:<0.19277.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 372 into 'ns_1@10.242.238.90' is <18125.27079.0> [ns_server:debug,2014-08-19T16:51:11.734,ns_1@10.242.238.88:<0.19277.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 372 into 'ns_1@10.242.238.89' is <18124.32006.0> [rebalance:debug,2014-08-19T16:51:11.734,ns_1@10.242.238.88:<0.19269.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 372 is <0.19277.1> [ns_server:debug,2014-08-19T16:51:11.760,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,751047}, tap_estimate, {replica_building,"default",372,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27079.0>, <<"replication_building_372_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.771,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,762521}, tap_estimate, {replica_building,"default",372,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32006.0>, <<"replication_building_372_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.771,ns_1@10.242.238.88:<0.19278.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32006.0>}, {'ns_1@10.242.238.90',<18125.27079.0>}]) [rebalance:info,2014-08-19T16:51:11.772,ns_1@10.242.238.88:<0.19269.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:11.772,ns_1@10.242.238.88:<0.19269.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 372 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.773,ns_1@10.242.238.88:<0.19269.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.773,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:11.776,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.776,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19290.1>) 
[ns_server:debug,2014-08-19T16:51:11.776,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 883) [ns_server:debug,2014-08-19T16:51:11.777,ns_1@10.242.238.88:<0.19291.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.777,ns_1@10.242.238.88:<0.19291.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:11.777,ns_1@10.242.238.88:<0.19290.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 883 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.777,ns_1@10.242.238.88:<0.19296.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 883 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.777,ns_1@10.242.238.88:<0.19297.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 883 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [views:debug,2014-08-19T16:51:11.780,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/9. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.781,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",9,active,0} [ns_server:debug,2014-08-19T16:51:11.781,ns_1@10.242.238.88:<0.19298.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 883 into 'ns_1@10.242.238.89' is <18124.32026.0> [ns_server:debug,2014-08-19T16:51:11.784,ns_1@10.242.238.88:<0.19298.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 883 into 'ns_1@10.242.238.91' is <18126.28186.0> [rebalance:debug,2014-08-19T16:51:11.784,ns_1@10.242.238.88:<0.19290.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 883 is <0.19298.1> [ns_server:debug,2014-08-19T16:51:11.807,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,798943}, tap_estimate, {replica_building,"default",883,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32026.0>, <<"replication_building_883_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.820,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,811239}, tap_estimate, {replica_building,"default",883,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28186.0>, <<"replication_building_883_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:11.820,ns_1@10.242.238.88:<0.19299.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28186.0>}, {'ns_1@10.242.238.89',<18124.32026.0>}]) [rebalance:info,2014-08-19T16:51:11.820,ns_1@10.242.238.88:<0.19290.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:11.821,ns_1@10.242.238.88:<0.19290.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 883 on ns_1@10.242.238.88 
[rebalance:info,2014-08-19T16:51:11.822,ns_1@10.242.238.88:<0.19290.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.822,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.825,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.826,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19325.1>) [ns_server:debug,2014-08-19T16:51:11.826,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 627) [ns_server:debug,2014-08-19T16:51:11.826,ns_1@10.242.238.88:<0.19326.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.826,ns_1@10.242.238.88:<0.19326.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:11.826,ns_1@10.242.238.88:<0.19325.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 627 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.827,ns_1@10.242.238.88:<0.19331.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 627 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.827,ns_1@10.242.238.88:<0.19332.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 627 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.830,ns_1@10.242.238.88:<0.19333.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 627 into 'ns_1@10.242.238.89' is <18124.32031.0> [ns_server:debug,2014-08-19T16:51:11.833,ns_1@10.242.238.88:<0.19333.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 627 into 'ns_1@10.242.238.90' is <18125.27098.0> [rebalance:debug,2014-08-19T16:51:11.833,ns_1@10.242.238.88:<0.19325.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 627 is <0.19333.1> [ns_server:debug,2014-08-19T16:51:11.857,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,848823}, tap_estimate, {replica_building,"default",627,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32031.0>, <<"replication_building_627_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.869,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,860751}, tap_estimate, {replica_building,"default",627,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27098.0>, <<"replication_building_627_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.870,ns_1@10.242.238.88:<0.19334.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27098.0>}, {'ns_1@10.242.238.89',<18124.32031.0>}]) 
[rebalance:info,2014-08-19T16:51:11.870,ns_1@10.242.238.88:<0.19325.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:11.870,ns_1@10.242.238.88:<0.19325.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 627 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.871,ns_1@10.242.238.88:<0.19325.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.871,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.874,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:11.874,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19346.1>) [ns_server:debug,2014-08-19T16:51:11.875,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 371) [ns_server:debug,2014-08-19T16:51:11.875,ns_1@10.242.238.88:<0.19347.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.875,ns_1@10.242.238.88:<0.19347.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:11.875,ns_1@10.242.238.88:<0.19346.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 371 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.875,ns_1@10.242.238.88:<0.19352.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 371 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.875,ns_1@10.242.238.88:<0.19353.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 371 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.880,ns_1@10.242.238.88:<0.19354.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 371 into 'ns_1@10.242.238.90' is <18125.27104.0> [ns_server:debug,2014-08-19T16:51:11.880,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 7. Nacking mccouch update. [views:debug,2014-08-19T16:51:11.881,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/7. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.881,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",7,active,0} [ns_server:debug,2014-08-19T16:51:11.883,ns_1@10.242.238.88:<0.19354.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 371 into 'ns_1@10.242.238.89' is <18124.32042.0> [rebalance:debug,2014-08-19T16:51:11.883,ns_1@10.242.238.88:<0.19346.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 371 is <0.19354.1> [ns_server:debug,2014-08-19T16:51:11.883,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,13,869,558,503,192,792, 737,426,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608,425, 297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021,944, 816,761,633,450,322,139,11,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942, 9,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940, 812,757,7,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939, 811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419, 291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938, 810,755,627,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418,290, 235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937,809, 754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417,289, 234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808,753, 625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233,105, 961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752,624, 441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232,960, 832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623,440, 
312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959,831, 648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984,673, 51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855,544, 489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982,671, 49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853,542, 487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980,669, 47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851,540, 485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978,667, 45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849,538, 483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976,665, 43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847,536, 481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974,663, 41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845,534, 479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972,661, 39,350,895,584,273,218,818,763,452,141,997,686,375,920,609,298,243] [ns_server:debug,2014-08-19T16:51:11.906,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,897724}, tap_estimate, {replica_building,"default",371,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27104.0>, <<"replication_building_371_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:11.921,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,912894}, tap_estimate, {replica_building,"default",371,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32042.0>, <<"replication_building_371_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:11.922,ns_1@10.242.238.88:<0.19355.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32042.0>}, {'ns_1@10.242.238.90',<18125.27104.0>}]) [rebalance:info,2014-08-19T16:51:11.922,ns_1@10.242.238.88:<0.19346.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:11.923,ns_1@10.242.238.88:<0.19346.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 371 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.923,ns_1@10.242.238.88:<0.19346.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.924,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:11.926,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.926,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19367.1>) [ns_server:debug,2014-08-19T16:51:11.927,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 882) 
[ns_server:debug,2014-08-19T16:51:11.927,ns_1@10.242.238.88:<0.19368.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.927,ns_1@10.242.238.88:<0.19368.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:11.927,ns_1@10.242.238.88:<0.19367.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 882 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.927,ns_1@10.242.238.88:<0.19373.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 882 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.927,ns_1@10.242.238.88:<0.19374.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 882 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.931,ns_1@10.242.238.88:<0.19375.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 882 into 'ns_1@10.242.238.89' is <18124.32062.0> [ns_server:debug,2014-08-19T16:51:11.934,ns_1@10.242.238.88:<0.19375.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 882 into 'ns_1@10.242.238.91' is <18126.28206.0> [rebalance:debug,2014-08-19T16:51:11.934,ns_1@10.242.238.88:<0.19367.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 882 is <0.19375.1> [ns_server:debug,2014-08-19T16:51:11.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,952105}, tap_estimate, {replica_building,"default",882,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32062.0>, <<"replication_building_882_'ns_1@10.242.238.89'">>} [views:debug,2014-08-19T16:51:11.970,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/7. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:11.970,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",7,active,0} [ns_server:debug,2014-08-19T16:51:11.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,963906}, tap_estimate, {replica_building,"default",882,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28206.0>, <<"replication_building_882_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:11.973,ns_1@10.242.238.88:<0.19376.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28206.0>}, {'ns_1@10.242.238.89',<18124.32062.0>}]) [rebalance:info,2014-08-19T16:51:11.973,ns_1@10.242.238.88:<0.19367.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:11.974,ns_1@10.242.238.88:<0.19367.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 882 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:11.974,ns_1@10.242.238.88:<0.19367.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:11.975,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:11.977,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:11.978,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19388.1>) [ns_server:debug,2014-08-19T16:51:11.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 626) [ns_server:debug,2014-08-19T16:51:11.978,ns_1@10.242.238.88:<0.19389.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:11.979,ns_1@10.242.238.88:<0.19389.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:11.979,ns_1@10.242.238.88:<0.19388.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 626 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:11.979,ns_1@10.242.238.88:<0.19394.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 626 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:11.979,ns_1@10.242.238.88:<0.19395.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 626 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:11.983,ns_1@10.242.238.88:<0.19396.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 626 into 'ns_1@10.242.238.89' is <18124.32067.0> [ns_server:debug,2014-08-19T16:51:11.985,ns_1@10.242.238.88:<0.19396.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 626 into 'ns_1@10.242.238.90' is <18125.27123.0> [rebalance:debug,2014-08-19T16:51:11.986,ns_1@10.242.238.88:<0.19388.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 626 is <0.19396.1> [ns_server:debug,2014-08-19T16:51:12.008,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452671,999812}, tap_estimate, {replica_building,"default",626,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32067.0>, <<"replication_building_626_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.021,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,12558}, tap_estimate, {replica_building,"default",626,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27123.0>, <<"replication_building_626_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.022,ns_1@10.242.238.88:<0.19397.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27123.0>}, {'ns_1@10.242.238.89',<18124.32067.0>}]) [rebalance:info,2014-08-19T16:51:12.022,ns_1@10.242.238.88:<0.19388.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:12.022,ns_1@10.242.238.88:<0.19388.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 626 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.023,ns_1@10.242.238.88:<0.19388.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.023,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.026,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:12.026,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19409.1>) 
[ns_server:debug,2014-08-19T16:51:12.026,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 370) [ns_server:debug,2014-08-19T16:51:12.026,ns_1@10.242.238.88:<0.19410.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.027,ns_1@10.242.238.88:<0.19410.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:12.027,ns_1@10.242.238.88:<0.19409.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 370 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.027,ns_1@10.242.238.88:<0.19415.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 370 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.027,ns_1@10.242.238.88:<0.19416.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 370 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.031,ns_1@10.242.238.88:<0.19417.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 370 into 'ns_1@10.242.238.90' is <18125.27129.0> [ns_server:debug,2014-08-19T16:51:12.033,ns_1@10.242.238.88:<0.19417.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 370 into 'ns_1@10.242.238.89' is <18124.32072.0> [rebalance:debug,2014-08-19T16:51:12.033,ns_1@10.242.238.88:<0.19409.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 370 is <0.19417.1> [ns_server:debug,2014-08-19T16:51:12.058,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,48990}, tap_estimate, {replica_building,"default",370,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27129.0>, <<"replication_building_370_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.070,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,61096}, tap_estimate, {replica_building,"default",370,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32072.0>, <<"replication_building_370_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.070,ns_1@10.242.238.88:<0.19418.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32072.0>}, {'ns_1@10.242.238.90',<18125.27129.0>}]) [rebalance:info,2014-08-19T16:51:12.070,ns_1@10.242.238.88:<0.19409.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:12.071,ns_1@10.242.238.88:<0.19409.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 370 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.071,ns_1@10.242.238.88:<0.19409.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.072,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:12.074,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got 
actions: [{move,{881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.075,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19444.1>) [ns_server:debug,2014-08-19T16:51:12.075,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 881) [ns_server:debug,2014-08-19T16:51:12.075,ns_1@10.242.238.88:<0.19445.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.075,ns_1@10.242.238.88:<0.19445.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:12.075,ns_1@10.242.238.88:<0.19444.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 881 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.076,ns_1@10.242.238.88:<0.19450.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 881 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.076,ns_1@10.242.238.88:<0.19451.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 881 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.079,ns_1@10.242.238.88:<0.19452.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 881 into 'ns_1@10.242.238.89' is <18124.32078.0> [ns_server:debug,2014-08-19T16:51:12.082,ns_1@10.242.238.88:<0.19452.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 881 into 'ns_1@10.242.238.91' is <18126.28226.0> [rebalance:debug,2014-08-19T16:51:12.082,ns_1@10.242.238.88:<0.19444.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 881 is <0.19452.1> [ns_server:debug,2014-08-19T16:51:12.108,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,98977}, tap_estimate, {replica_building,"default",881,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32078.0>, <<"replication_building_881_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 5. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.112,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/5. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:12.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",5,active,0} [ns_server:debug,2014-08-19T16:51:12.114,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,13,869,558,503,192,792, 737,426,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608,425, 297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021,944, 816,761,633,450,322,139,11,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942, 9,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940, 812,757,7,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939, 811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419, 291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938, 810,755,627,5,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,131,987,859,676,548,493,365,182,910,782,727,599,416,288,233, 105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807,752, 624,441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287,232, 960,832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751,623, 440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103,959, 831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128,984, 673,51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255,855, 544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126,982, 671,49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253,853, 
542,487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124,980, 669,47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251,851, 540,485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122,978, 667,45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249,849, 538,483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120,976, 665,43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247,847, 536,481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118,974, 663,41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245,845, 534,479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116,972, 661,39,350,895,584,273,218,818,763,452,141,997,686,375,920,609,298,243] [ns_server:debug,2014-08-19T16:51:12.120,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,110983}, tap_estimate, {replica_building,"default",881,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28226.0>, <<"replication_building_881_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:12.120,ns_1@10.242.238.88:<0.19453.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28226.0>}, {'ns_1@10.242.238.89',<18124.32078.0>}]) [rebalance:info,2014-08-19T16:51:12.120,ns_1@10.242.238.88:<0.19444.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:12.121,ns_1@10.242.238.88:<0.19444.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 881 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.121,ns_1@10.242.238.88:<0.19444.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.122,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.124,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.124,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19465.1>) [ns_server:debug,2014-08-19T16:51:12.125,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 625) [ns_server:debug,2014-08-19T16:51:12.125,ns_1@10.242.238.88:<0.19466.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.125,ns_1@10.242.238.88:<0.19466.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:12.125,ns_1@10.242.238.88:<0.19465.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 625 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.125,ns_1@10.242.238.88:<0.19471.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 625 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.125,ns_1@10.242.238.88:<0.19472.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 625 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.129,ns_1@10.242.238.88:<0.19473.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 625 into 'ns_1@10.242.238.89' is <18124.32097.0> [ns_server:debug,2014-08-19T16:51:12.131,ns_1@10.242.238.88:<0.19473.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 625 into 'ns_1@10.242.238.90' is <18125.27148.0> [rebalance:debug,2014-08-19T16:51:12.132,ns_1@10.242.238.88:<0.19465.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 625 is <0.19473.1> [ns_server:debug,2014-08-19T16:51:12.157,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,148352}, tap_estimate, {replica_building,"default",625,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32097.0>, <<"replication_building_625_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.169,ns_1@10.242.238.88:<0.19474.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27148.0>}, {'ns_1@10.242.238.89',<18124.32097.0>}]) [ns_server:debug,2014-08-19T16:51:12.169,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,159608}, tap_estimate, {replica_building,"default",625,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27148.0>, <<"replication_building_625_'ns_1@10.242.238.90'">>} [rebalance:info,2014-08-19T16:51:12.169,ns_1@10.242.238.88:<0.19465.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:12.170,ns_1@10.242.238.88:<0.19465.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 625 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.170,ns_1@10.242.238.88:<0.19465.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.171,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [views:debug,2014-08-19T16:51:12.171,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/5. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:12.171,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",5,active,0} [ns_server:debug,2014-08-19T16:51:12.173,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:12.173,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19486.1>) [ns_server:debug,2014-08-19T16:51:12.174,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 369) [ns_server:debug,2014-08-19T16:51:12.174,ns_1@10.242.238.88:<0.19487.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.174,ns_1@10.242.238.88:<0.19487.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:12.174,ns_1@10.242.238.88:<0.19486.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 369 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.175,ns_1@10.242.238.88:<0.19492.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 369 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.175,ns_1@10.242.238.88:<0.19493.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 369 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.179,ns_1@10.242.238.88:<0.19494.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 369 into 'ns_1@10.242.238.90' is <18125.27154.0> [ns_server:debug,2014-08-19T16:51:12.182,ns_1@10.242.238.88:<0.19494.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 369 into 'ns_1@10.242.238.89' is <18124.32102.0> [rebalance:debug,2014-08-19T16:51:12.182,ns_1@10.242.238.88:<0.19486.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 369 is <0.19494.1> [ns_server:debug,2014-08-19T16:51:12.205,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,196853}, tap_estimate, {replica_building,"default",369,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27154.0>, <<"replication_building_369_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.219,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,209966}, tap_estimate, {replica_building,"default",369,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32102.0>, <<"replication_building_369_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.219,ns_1@10.242.238.88:<0.19495.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32102.0>}, {'ns_1@10.242.238.90',<18125.27154.0>}]) [rebalance:info,2014-08-19T16:51:12.219,ns_1@10.242.238.88:<0.19486.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 
[rebalance:info,2014-08-19T16:51:12.220,ns_1@10.242.238.88:<0.19486.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 369 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.220,ns_1@10.242.238.88:<0.19486.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.221,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:12.224,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.224,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19507.1>) [ns_server:debug,2014-08-19T16:51:12.224,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 880) [ns_server:debug,2014-08-19T16:51:12.224,ns_1@10.242.238.88:<0.19508.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.224,ns_1@10.242.238.88:<0.19508.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:12.224,ns_1@10.242.238.88:<0.19507.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 880 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.225,ns_1@10.242.238.88:<0.19513.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 880 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.225,ns_1@10.242.238.88:<0.19514.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 880 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.228,ns_1@10.242.238.88:<0.19515.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 880 into 'ns_1@10.242.238.89' is <18124.32122.0> [ns_server:debug,2014-08-19T16:51:12.231,ns_1@10.242.238.88:<0.19515.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 880 into 'ns_1@10.242.238.91' is <18126.28246.0> [rebalance:debug,2014-08-19T16:51:12.231,ns_1@10.242.238.88:<0.19507.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 880 is <0.19515.1> [ns_server:debug,2014-08-19T16:51:12.254,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,245728}, tap_estimate, {replica_building,"default",880,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32122.0>, <<"replication_building_880_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.273,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,264451}, tap_estimate, {replica_building,"default",880,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28246.0>, <<"replication_building_880_'ns_1@10.242.238.91'">>} 
[ns_server:debug,2014-08-19T16:51:12.274,ns_1@10.242.238.88:<0.19516.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28246.0>}, {'ns_1@10.242.238.89',<18124.32122.0>}]) [rebalance:info,2014-08-19T16:51:12.274,ns_1@10.242.238.88:<0.19507.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:12.274,ns_1@10.242.238.88:<0.19507.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 880 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.275,ns_1@10.242.238.88:<0.19507.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.275,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.278,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.278,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19542.1>) [ns_server:debug,2014-08-19T16:51:12.279,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 624) [ns_server:debug,2014-08-19T16:51:12.279,ns_1@10.242.238.88:<0.19543.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.279,ns_1@10.242.238.88:<0.19543.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:12.279,ns_1@10.242.238.88:<0.19542.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 624 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.279,ns_1@10.242.238.88:<0.19548.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 624 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.279,ns_1@10.242.238.88:<0.19549.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 624 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.283,ns_1@10.242.238.88:<0.19550.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 624 into 'ns_1@10.242.238.89' is <18124.32127.0> [ns_server:debug,2014-08-19T16:51:12.286,ns_1@10.242.238.88:<0.19550.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 624 into 'ns_1@10.242.238.90' is <18125.27159.0> [rebalance:debug,2014-08-19T16:51:12.286,ns_1@10.242.238.88:<0.19542.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 624 is <0.19550.1> [ns_server:debug,2014-08-19T16:51:12.310,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,301928}, tap_estimate, {replica_building,"default",624,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32127.0>, <<"replication_building_624_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.322,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,312981}, tap_estimate, {replica_building,"default",624,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27159.0>, <<"replication_building_624_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.322,ns_1@10.242.238.88:<0.19551.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27159.0>}, {'ns_1@10.242.238.89',<18124.32127.0>}]) [rebalance:info,2014-08-19T16:51:12.322,ns_1@10.242.238.88:<0.19542.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:12.323,ns_1@10.242.238.88:<0.19542.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 624 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.323,ns_1@10.242.238.88:<0.19542.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.324,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.327,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:12.327,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19563.1>) 
[ns_server:debug,2014-08-19T16:51:12.327,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 368) [ns_server:debug,2014-08-19T16:51:12.327,ns_1@10.242.238.88:<0.19564.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.327,ns_1@10.242.238.88:<0.19564.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:12.327,ns_1@10.242.238.88:<0.19563.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 368 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.328,ns_1@10.242.238.88:<0.19569.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 368 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.328,ns_1@10.242.238.88:<0.19570.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 368 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.329,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 3. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.329,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/3. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:12.329,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",3,active,0} [ns_server:debug,2014-08-19T16:51:12.332,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,13,869,558,503,192,792, 737,426,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608,425, 297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021,944, 816,761,633,450,322,139,11,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942, 9,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 
294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940, 812,757,7,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939, 811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419, 291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938, 810,755,627,5,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,3,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,985,857,674,546,491,363,180,908,780,725,597,414,286,231,103, 959,831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439,128, 984,673,51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310,255, 855,544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437,126, 982,671,49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308,253, 853,542,487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435,124, 980,669,47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306,251, 851,540,485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433,122, 978,667,45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304,249, 849,538,483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431,120, 976,665,43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302,247, 847,536,481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429,118, 974,663,41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300,245, 845,534,479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427,116, 972,661,39,350,895,584,273,218,818,763,452,141,997,686,375,920,609,298,243] [ns_server:debug,2014-08-19T16:51:12.332,ns_1@10.242.238.88:<0.19571.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 368 into 'ns_1@10.242.238.90' is <18125.27165.0> [ns_server:debug,2014-08-19T16:51:12.335,ns_1@10.242.238.88:<0.19571.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 368 into 'ns_1@10.242.238.89' is <18124.32132.0> [rebalance:debug,2014-08-19T16:51:12.335,ns_1@10.242.238.88:<0.19563.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 368 is <0.19571.1> [ns_server:debug,2014-08-19T16:51:12.360,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,351026}, tap_estimate, {replica_building,"default",368,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27165.0>, <<"replication_building_368_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.372,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,363569}, tap_estimate, {replica_building,"default",368,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32132.0>, <<"replication_building_368_'ns_1@10.242.238.89'">>} 
[ns_server:debug,2014-08-19T16:51:12.373,ns_1@10.242.238.88:<0.19572.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32132.0>}, {'ns_1@10.242.238.90',<18125.27165.0>}]) [rebalance:info,2014-08-19T16:51:12.373,ns_1@10.242.238.88:<0.19563.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:12.374,ns_1@10.242.238.88:<0.19563.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 368 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.374,ns_1@10.242.238.88:<0.19563.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.375,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:12.377,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.377,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19584.1>) [ns_server:debug,2014-08-19T16:51:12.377,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 879) [ns_server:debug,2014-08-19T16:51:12.378,ns_1@10.242.238.88:<0.19585.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.378,ns_1@10.242.238.88:<0.19585.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:12.378,ns_1@10.242.238.88:<0.19584.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 879 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.378,ns_1@10.242.238.88:<0.19590.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 879 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.378,ns_1@10.242.238.88:<0.19591.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 879 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [views:debug,2014-08-19T16:51:12.380,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/3. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:12.380,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",3,active,0} [ns_server:debug,2014-08-19T16:51:12.383,ns_1@10.242.238.88:<0.19592.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 879 into 'ns_1@10.242.238.89' is <18124.32138.0> [ns_server:debug,2014-08-19T16:51:12.385,ns_1@10.242.238.88:<0.19592.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 879 into 'ns_1@10.242.238.91' is <18126.28266.0> [rebalance:debug,2014-08-19T16:51:12.385,ns_1@10.242.238.88:<0.19584.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 879 is <0.19592.1> [ns_server:debug,2014-08-19T16:51:12.409,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,400856}, tap_estimate, {replica_building,"default",879,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32138.0>, <<"replication_building_879_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.422,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,413773}, tap_estimate, {replica_building,"default",879,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28266.0>, <<"replication_building_879_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:12.423,ns_1@10.242.238.88:<0.19593.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28266.0>}, {'ns_1@10.242.238.89',<18124.32138.0>}]) [rebalance:info,2014-08-19T16:51:12.423,ns_1@10.242.238.88:<0.19584.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:12.424,ns_1@10.242.238.88:<0.19584.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 879 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.424,ns_1@10.242.238.88:<0.19584.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.425,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.427,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.427,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19605.1>) [ns_server:debug,2014-08-19T16:51:12.428,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 623) [ns_server:debug,2014-08-19T16:51:12.428,ns_1@10.242.238.88:<0.19606.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.428,ns_1@10.242.238.88:<0.19606.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:12.428,ns_1@10.242.238.88:<0.19605.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 623 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.428,ns_1@10.242.238.88:<0.19611.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 623 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.428,ns_1@10.242.238.88:<0.19612.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 623 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.432,ns_1@10.242.238.88:<0.19613.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 623 into 'ns_1@10.242.238.89' is <18124.32157.0> [ns_server:debug,2014-08-19T16:51:12.435,ns_1@10.242.238.88:<0.19613.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 623 into 'ns_1@10.242.238.90' is <18125.27184.0> [rebalance:debug,2014-08-19T16:51:12.435,ns_1@10.242.238.88:<0.19605.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 623 is <0.19613.1> [ns_server:debug,2014-08-19T16:51:12.459,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,450330}, tap_estimate, {replica_building,"default",623,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32157.0>, <<"replication_building_623_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.470,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,461833}, tap_estimate, {replica_building,"default",623,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27184.0>, <<"replication_building_623_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.471,ns_1@10.242.238.88:<0.19614.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27184.0>}, {'ns_1@10.242.238.89',<18124.32157.0>}]) [rebalance:info,2014-08-19T16:51:12.471,ns_1@10.242.238.88:<0.19605.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:12.471,ns_1@10.242.238.88:<0.19605.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 623 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.472,ns_1@10.242.238.88:<0.19605.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.473,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.475,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:12.475,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19640.1>) 
[ns_server:debug,2014-08-19T16:51:12.476,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 367) [ns_server:debug,2014-08-19T16:51:12.476,ns_1@10.242.238.88:<0.19641.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.476,ns_1@10.242.238.88:<0.19641.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:12.476,ns_1@10.242.238.88:<0.19640.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 367 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.476,ns_1@10.242.238.88:<0.19646.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 367 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.476,ns_1@10.242.238.88:<0.19647.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 367 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.480,ns_1@10.242.238.88:<0.19648.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 367 into 'ns_1@10.242.238.90' is <18125.27190.0> [ns_server:debug,2014-08-19T16:51:12.483,ns_1@10.242.238.88:<0.19648.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 367 into 'ns_1@10.242.238.89' is <18124.32162.0> [rebalance:debug,2014-08-19T16:51:12.483,ns_1@10.242.238.88:<0.19640.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 367 is <0.19648.1> [ns_server:debug,2014-08-19T16:51:12.507,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,498501}, tap_estimate, {replica_building,"default",367,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27190.0>, <<"replication_building_367_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.518,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,509962}, tap_estimate, {replica_building,"default",367,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32162.0>, <<"replication_building_367_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.519,ns_1@10.242.238.88:<0.19649.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32162.0>}, {'ns_1@10.242.238.90',<18125.27190.0>}]) [rebalance:info,2014-08-19T16:51:12.519,ns_1@10.242.238.88:<0.19640.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:12.520,ns_1@10.242.238.88:<0.19640.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 367 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.520,ns_1@10.242.238.88:<0.19640.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.521,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} 
[ns_server:debug,2014-08-19T16:51:12.523,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.524,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19661.1>) [ns_server:debug,2014-08-19T16:51:12.524,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 878) [ns_server:debug,2014-08-19T16:51:12.524,ns_1@10.242.238.88:<0.19662.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.524,ns_1@10.242.238.88:<0.19662.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:12.524,ns_1@10.242.238.88:<0.19661.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 878 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.525,ns_1@10.242.238.88:<0.19667.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 878 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.525,ns_1@10.242.238.88:<0.19668.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 878 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.528,ns_1@10.242.238.88:<0.19669.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 878 into 'ns_1@10.242.238.89' is <18124.32168.0> [ns_server:debug,2014-08-19T16:51:12.532,ns_1@10.242.238.88:<0.19669.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 878 into 'ns_1@10.242.238.91' is <18126.28292.0> [rebalance:debug,2014-08-19T16:51:12.532,ns_1@10.242.238.88:<0.19661.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 878 is <0.19669.1> [ns_server:debug,2014-08-19T16:51:12.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 1. Nacking mccouch update. [views:debug,2014-08-19T16:51:12.547,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:12.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1,active,0} [ns_server:debug,2014-08-19T16:51:12.549,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [933,622,311,856,545,490,179,779,724,413,958,647,336,25,881,570,259,204,804, 749,438,127,983,672,361,906,595,284,229,829,518,463,152,75,697,386,1008,931, 620,309,254,854,543,488,177,777,722,411,956,645,334,23,879,568,257,202,802, 747,436,125,981,670,359,904,593,282,227,827,516,461,150,73,695,384,1006,929, 618,307,252,852,541,486,175,775,720,409,954,643,332,21,877,566,511,200,800, 745,434,123,979,668,357,902,591,280,225,825,514,459,148,71,693,382,1004,927, 616,305,250,850,539,484,173,773,718,407,952,641,330,19,875,564,509,198,798, 743,432,121,977,666,355,900,589,278,223,823,512,457,146,691,69,380,1002,925, 614,303,248,848,537,482,171,771,716,405,950,639,328,17,873,562,507,196,796, 741,430,119,975,664,353,898,587,276,221,821,766,455,144,689,67,378,1000,923, 612,301,246,846,535,480,169,769,714,403,948,637,326,15,871,560,505,194,794, 739,428,117,973,662,351,896,585,274,219,819,764,453,142,998,687,65,376,921, 610,299,244,844,533,478,167,712,401,1023,946,635,324,13,869,558,503,192,792, 737,426,115,971,843,660,532,477,349,166,894,89,711,583,400,272,217,1022,945, 817,762,634,451,323,140,996,868,685,63,557,502,374,191,919,791,736,608,425, 297,242,114,970,842,659,531,476,37,348,165,893,710,582,399,271,216,1021,944, 816,761,633,450,322,139,11,995,867,684,556,501,373,190,918,790,735,607,424, 296,241,113,969,841,658,530,475,347,164,892,87,709,581,398,270,215,1020,943, 815,760,632,449,321,138,994,866,683,61,555,500,372,189,917,789,734,606,423, 295,240,112,968,840,657,529,474,35,346,163,891,708,580,397,269,214,1019,942, 9,814,759,631,448,320,137,993,865,682,554,499,371,188,916,788,733,605,422, 294,239,111,967,839,656,528,473,345,162,890,85,707,579,396,268,213,1018,941, 813,758,630,447,319,136,992,864,681,59,553,498,370,187,915,787,732,604,421, 293,238,110,966,838,655,527,472,344,33,161,889,706,578,395,267,212,1017,940, 812,757,7,629,446,318,135,991,863,680,552,497,369,186,914,786,731,603,420, 292,237,109,965,837,654,526,471,343,160,888,83,705,577,394,266,211,1016,939, 811,756,628,445,317,134,990,862,679,57,551,496,368,185,913,785,730,602,419, 291,236,108,964,836,653,525,470,342,31,159,887,704,576,393,265,210,1015,938, 810,755,627,5,444,316,133,989,861,678,550,495,367,184,912,784,729,601,418, 290,235,107,963,835,652,524,469,341,158,886,81,703,575,392,264,209,1014,937, 809,754,626,443,315,132,988,860,677,55,549,494,366,183,911,783,728,600,417, 289,234,962,834,651,523,468,340,29,157,885,702,574,391,263,208,1013,936,808, 753,625,442,314,3,131,987,859,676,548,493,365,182,910,782,727,599,416,288, 233,105,961,833,650,522,467,339,156,884,79,701,573,390,262,207,1012,935,807, 752,624,441,313,130,986,858,675,547,53,492,364,181,909,781,726,598,415,287, 232,960,832,649,521,466,338,27,155,883,700,572,389,261,206,1011,934,806,751, 623,440,312,129,1,985,857,674,546,491,363,180,908,780,725,597,414,286,231, 103,959,831,648,520,465,337,154,882,77,699,571,388,260,205,1010,805,750,439, 128,984,673,51,362,907,596,285,230,830,519,464,153,698,387,1009,932,621,310, 255,855,544,489,178,778,723,412,101,957,646,335,880,569,258,203,803,748,437, 126,982,671,49,360,905,594,283,228,828,517,462,151,696,385,1007,930,619,308, 
253,853,542,487,176,99,776,721,410,955,644,333,878,567,256,201,801,746,435, 124,980,669,47,358,903,592,281,226,826,515,460,149,694,383,1005,928,617,306, 251,851,540,485,174,97,774,719,408,953,642,331,876,565,510,199,799,744,433, 122,978,667,45,356,901,590,279,224,824,513,458,147,692,381,1003,926,615,304, 249,849,538,483,172,95,772,717,406,951,640,329,874,563,508,197,797,742,431, 120,976,665,43,354,899,588,277,222,822,767,456,145,690,379,1001,924,613,302, 247,847,536,481,170,93,770,715,404,949,638,327,872,561,506,195,795,740,429, 118,974,663,41,352,897,586,275,220,820,765,454,143,999,688,377,922,611,300, 245,845,534,479,168,91,768,713,402,947,636,325,870,559,504,193,793,738,427, 116,972,661,39,350,895,584,273,218,818,763,452,141,997,686,375,920,609,298, 243] [ns_server:debug,2014-08-19T16:51:12.556,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,547453}, tap_estimate, {replica_building,"default",878,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32168.0>, <<"replication_building_878_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.568,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,559380}, tap_estimate, {replica_building,"default",878,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28292.0>, <<"replication_building_878_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:12.568,ns_1@10.242.238.88:<0.19670.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28292.0>}, {'ns_1@10.242.238.89',<18124.32168.0>}]) [rebalance:info,2014-08-19T16:51:12.568,ns_1@10.242.238.88:<0.19661.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:12.569,ns_1@10.242.238.88:<0.19661.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 878 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.570,ns_1@10.242.238.88:<0.19661.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.570,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.573,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.573,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19682.1>) [ns_server:debug,2014-08-19T16:51:12.573,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 622) [ns_server:debug,2014-08-19T16:51:12.574,ns_1@10.242.238.88:<0.19683.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.574,ns_1@10.242.238.88:<0.19683.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:12.574,ns_1@10.242.238.88:<0.19682.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 622 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.574,ns_1@10.242.238.88:<0.19688.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 622 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.574,ns_1@10.242.238.88:<0.19689.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 622 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.578,ns_1@10.242.238.88:<0.19690.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 622 into 'ns_1@10.242.238.89' is <18124.32179.0> [ns_server:debug,2014-08-19T16:51:12.580,ns_1@10.242.238.88:<0.19690.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 622 into 'ns_1@10.242.238.90' is <18125.27210.0> [rebalance:debug,2014-08-19T16:51:12.581,ns_1@10.242.238.88:<0.19682.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 622 is <0.19690.1> [ns_server:debug,2014-08-19T16:51:12.604,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,595296}, tap_estimate, {replica_building,"default",622,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32179.0>, <<"replication_building_622_'ns_1@10.242.238.89'">>} [views:debug,2014-08-19T16:51:12.614,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/1. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:12.614,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",1,active,0} [ns_server:debug,2014-08-19T16:51:12.617,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/511 [ns_server:debug,2014-08-19T16:51:12.617,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,608924}, tap_estimate, {replica_building,"default",622,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27210.0>, <<"replication_building_622_'ns_1@10.242.238.90'">>} [ns_server:info,2014-08-19T16:51:12.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/511 [ns_server:debug,2014-08-19T16:51:12.618,ns_1@10.242.238.88:<0.19691.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27210.0>}, {'ns_1@10.242.238.89',<18124.32179.0>}]) [rebalance:info,2014-08-19T16:51:12.618,ns_1@10.242.238.88:<0.19682.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:12.619,ns_1@10.242.238.88:<0.19682.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 622 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.620,ns_1@10.242.238.88:<0.19682.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.621,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{622, 
['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.623,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/767 [ns_server:debug,2014-08-19T16:51:12.624,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [ns_server:info,2014-08-19T16:51:12.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/767 [rebalance:debug,2014-08-19T16:51:12.624,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19704.1>) [ns_server:debug,2014-08-19T16:51:12.624,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 366) [ns_server:debug,2014-08-19T16:51:12.624,ns_1@10.242.238.88:<0.19705.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.624,ns_1@10.242.238.88:<0.19705.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:12.624,ns_1@10.242.238.88:<0.19704.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 366 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.625,ns_1@10.242.238.88:<0.19711.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 366 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [rebalance:info,2014-08-19T16:51:12.625,ns_1@10.242.238.88:<0.19710.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 366 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [ns_server:debug,2014-08-19T16:51:12.628,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1023 [ns_server:info,2014-08-19T16:51:12.629,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1023 [ns_server:debug,2014-08-19T16:51:12.630,ns_1@10.242.238.88:<0.19713.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 366 into 'ns_1@10.242.238.90' is <18125.27221.0> [ns_server:debug,2014-08-19T16:51:12.633,ns_1@10.242.238.88:<0.19713.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 366 into 'ns_1@10.242.238.89' is <18124.32192.0> [rebalance:debug,2014-08-19T16:51:12.633,ns_1@10.242.238.88:<0.19704.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 366 is <0.19713.1> [ns_server:debug,2014-08-19T16:51:12.634,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/766 [ns_server:info,2014-08-19T16:51:12.635,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/766 [ns_server:debug,2014-08-19T16:51:12.638,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1022 
[ns_server:info,2014-08-19T16:51:12.639,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1022 [ns_server:debug,2014-08-19T16:51:12.643,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/504 [ns_server:info,2014-08-19T16:51:12.644,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/504 [ns_server:debug,2014-08-19T16:51:12.651,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/506 [ns_server:info,2014-08-19T16:51:12.652,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/506 [ns_server:debug,2014-08-19T16:51:12.656,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/508 [ns_server:debug,2014-08-19T16:51:12.656,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,647300}, tap_estimate, {replica_building,"default",366,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27221.0>, <<"replication_building_366_'ns_1@10.242.238.90'">>} [ns_server:info,2014-08-19T16:51:12.657,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/508 [ns_server:debug,2014-08-19T16:51:12.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/510 [ns_server:info,2014-08-19T16:51:12.662,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/510 [ns_server:debug,2014-08-19T16:51:12.666,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/760 [ns_server:info,2014-08-19T16:51:12.667,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/760 [ns_server:debug,2014-08-19T16:51:12.669,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,660160}, tap_estimate, {replica_building,"default",366,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32192.0>, <<"replication_building_366_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.669,ns_1@10.242.238.88:<0.19714.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32192.0>}, {'ns_1@10.242.238.90',<18125.27221.0>}]) [rebalance:info,2014-08-19T16:51:12.669,ns_1@10.242.238.88:<0.19704.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:12.670,ns_1@10.242.238.88:<0.19704.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 366 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.670,ns_1@10.242.238.88:<0.19704.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.671,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:12.672,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/762 
[ns_server:info,2014-08-19T16:51:12.673,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/762 [ns_server:debug,2014-08-19T16:51:12.674,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.674,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19734.1>) [ns_server:debug,2014-08-19T16:51:12.675,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 877) [ns_server:debug,2014-08-19T16:51:12.675,ns_1@10.242.238.88:<0.19735.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.675,ns_1@10.242.238.88:<0.19735.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:12.675,ns_1@10.242.238.88:<0.19734.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 877 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.675,ns_1@10.242.238.88:<0.19740.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 877 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.675,ns_1@10.242.238.88:<0.19741.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 877 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/764 [ns_server:info,2014-08-19T16:51:12.678,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/764 [ns_server:debug,2014-08-19T16:51:12.680,ns_1@10.242.238.88:<0.19743.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 877 into 'ns_1@10.242.238.89' is <18124.32198.0> [ns_server:debug,2014-08-19T16:51:12.683,ns_1@10.242.238.88:<0.19743.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 877 into 'ns_1@10.242.238.91' is <18126.28298.0> [rebalance:debug,2014-08-19T16:51:12.683,ns_1@10.242.238.88:<0.19734.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 877 is <0.19743.1> [ns_server:debug,2014-08-19T16:51:12.683,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/491 [ns_server:info,2014-08-19T16:51:12.684,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/491 [ns_server:debug,2014-08-19T16:51:12.688,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/493 [ns_server:info,2014-08-19T16:51:12.689,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/493 [ns_server:debug,2014-08-19T16:51:12.692,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/495 
[ns_server:info,2014-08-19T16:51:12.693,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/495 [ns_server:debug,2014-08-19T16:51:12.697,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/497 [ns_server:info,2014-08-19T16:51:12.698,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/497 [ns_server:debug,2014-08-19T16:51:12.702,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/499 [ns_server:info,2014-08-19T16:51:12.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/499 [ns_server:debug,2014-08-19T16:51:12.706,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/503 [ns_server:info,2014-08-19T16:51:12.707,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/503 [ns_server:debug,2014-08-19T16:51:12.712,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/501 [ns_server:info,2014-08-19T16:51:12.713,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/501 [ns_server:debug,2014-08-19T16:51:12.714,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,700557}, tap_estimate, {replica_building,"default",877,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32198.0>, <<"replication_building_877_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.717,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/509 [ns_server:debug,2014-08-19T16:51:12.718,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,709049}, tap_estimate, {replica_building,"default",877,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28298.0>, <<"replication_building_877_'ns_1@10.242.238.91'">>} [ns_server:info,2014-08-19T16:51:12.718,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/509 [ns_server:debug,2014-08-19T16:51:12.718,ns_1@10.242.238.88:<0.19744.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28298.0>}, {'ns_1@10.242.238.89',<18124.32198.0>}]) [rebalance:info,2014-08-19T16:51:12.718,ns_1@10.242.238.88:<0.19734.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:12.719,ns_1@10.242.238.88:<0.19734.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 877 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.719,ns_1@10.242.238.88:<0.19734.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.720,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.721,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/494 
[ns_server:info,2014-08-19T16:51:12.722,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/494 [ns_server:debug,2014-08-19T16:51:12.723,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.723,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19765.1>) [ns_server:debug,2014-08-19T16:51:12.723,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 621) [ns_server:debug,2014-08-19T16:51:12.723,ns_1@10.242.238.88:<0.19766.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.723,ns_1@10.242.238.88:<0.19766.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:12.723,ns_1@10.242.238.88:<0.19765.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 621 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.724,ns_1@10.242.238.88:<0.19771.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 621 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.724,ns_1@10.242.238.88:<0.19772.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 621 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.727,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/751 [ns_server:info,2014-08-19T16:51:12.728,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/751 [ns_server:debug,2014-08-19T16:51:12.729,ns_1@10.242.238.88:<0.19773.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 621 into 'ns_1@10.242.238.89' is <18124.32203.0> [ns_server:debug,2014-08-19T16:51:12.732,ns_1@10.242.238.88:<0.19773.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 621 into 'ns_1@10.242.238.90' is <18125.27235.0> [rebalance:debug,2014-08-19T16:51:12.732,ns_1@10.242.238.88:<0.19765.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 621 is <0.19773.1> [ns_server:debug,2014-08-19T16:51:12.732,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/505 [ns_server:info,2014-08-19T16:51:12.733,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/505 [ns_server:debug,2014-08-19T16:51:12.737,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/498 [ns_server:info,2014-08-19T16:51:12.738,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/498 [ns_server:debug,2014-08-19T16:51:12.742,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/496 
[ns_server:info,2014-08-19T16:51:12.742,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/496 [ns_server:debug,2014-08-19T16:51:12.746,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/492 [ns_server:info,2014-08-19T16:51:12.747,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/492 [ns_server:debug,2014-08-19T16:51:12.752,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/507 [ns_server:info,2014-08-19T16:51:12.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/507 [ns_server:debug,2014-08-19T16:51:12.757,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/750 [ns_server:debug,2014-08-19T16:51:12.757,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,748350}, tap_estimate, {replica_building,"default",621,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32203.0>, <<"replication_building_621_'ns_1@10.242.238.89'">>} [ns_server:info,2014-08-19T16:51:12.758,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/750 [ns_server:debug,2014-08-19T16:51:12.761,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/753 [ns_server:info,2014-08-19T16:51:12.762,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/753 [ns_server:debug,2014-08-19T16:51:12.766,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1010 [ns_server:info,2014-08-19T16:51:12.767,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1010 [ns_server:debug,2014-08-19T16:51:12.768,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,759297}, tap_estimate, {replica_building,"default",621,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27235.0>, <<"replication_building_621_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.768,ns_1@10.242.238.88:<0.19776.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27235.0>}, {'ns_1@10.242.238.89',<18124.32203.0>}]) [rebalance:info,2014-08-19T16:51:12.768,ns_1@10.242.238.88:<0.19765.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:12.769,ns_1@10.242.238.88:<0.19765.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 621 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.769,ns_1@10.242.238.88:<0.19765.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.770,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.770,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1019 
[ns_server:info,2014-08-19T16:51:12.772,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1019 [ns_server:debug,2014-08-19T16:51:12.774,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:12.774,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19796.1>) [ns_server:debug,2014-08-19T16:51:12.775,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 365) [ns_server:debug,2014-08-19T16:51:12.775,ns_1@10.242.238.88:<0.19797.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.775,ns_1@10.242.238.88:<0.19797.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:12.775,ns_1@10.242.238.88:<0.19796.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 365 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.775,ns_1@10.242.238.88:<0.19802.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 365 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.775,ns_1@10.242.238.88:<0.19803.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 365 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.776,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1013 [ns_server:info,2014-08-19T16:51:12.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1013 [ns_server:debug,2014-08-19T16:51:12.779,ns_1@10.242.238.88:<0.19805.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 365 into 'ns_1@10.242.238.90' is <18125.27255.0> [ns_server:debug,2014-08-19T16:51:12.782,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/502 [ns_server:debug,2014-08-19T16:51:12.783,ns_1@10.242.238.88:<0.19805.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 365 into 'ns_1@10.242.238.89' is <18124.32222.0> [ns_server:info,2014-08-19T16:51:12.783,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/502 [rebalance:debug,2014-08-19T16:51:12.783,ns_1@10.242.238.88:<0.19796.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 365 is <0.19805.1> [ns_server:debug,2014-08-19T16:51:12.787,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/763 [ns_server:info,2014-08-19T16:51:12.787,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/763 [ns_server:debug,2014-08-19T16:51:12.791,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/500 
[ns_server:info,2014-08-19T16:51:12.792,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/500 [ns_server:debug,2014-08-19T16:51:12.796,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1006 [ns_server:info,2014-08-19T16:51:12.797,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1006 [ns_server:debug,2014-08-19T16:51:12.801,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1008 [ns_server:info,2014-08-19T16:51:12.802,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1008 [ns_server:debug,2014-08-19T16:51:12.806,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1016 [ns_server:debug,2014-08-19T16:51:12.806,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,797763}, tap_estimate, {replica_building,"default",365,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27255.0>, <<"replication_building_365_'ns_1@10.242.238.90'">>} [ns_server:info,2014-08-19T16:51:12.807,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1016 [ns_server:debug,2014-08-19T16:51:12.811,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/759 [ns_server:info,2014-08-19T16:51:12.812,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/759 [ns_server:debug,2014-08-19T16:51:12.818,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,809226}, tap_estimate, {replica_building,"default",365,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32222.0>, <<"replication_building_365_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.818,ns_1@10.242.238.88:<0.19807.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32222.0>}, {'ns_1@10.242.238.90',<18125.27255.0>}]) [ns_server:debug,2014-08-19T16:51:12.818,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1015 [rebalance:info,2014-08-19T16:51:12.818,ns_1@10.242.238.88:<0.19796.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [ns_server:info,2014-08-19T16:51:12.819,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1015 [rebalance:info,2014-08-19T16:51:12.819,ns_1@10.242.238.88:<0.19796.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 365 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.820,ns_1@10.242.238.88:<0.19796.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.820,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:12.824,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] 
[rebalance:debug,2014-08-19T16:51:12.825,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19826.1>) [ns_server:debug,2014-08-19T16:51:12.825,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 876) [ns_server:debug,2014-08-19T16:51:12.825,ns_1@10.242.238.88:<0.19827.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.825,ns_1@10.242.238.88:<0.19827.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.91', nack} [ns_server:debug,2014-08-19T16:51:12.826,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/755 [rebalance:info,2014-08-19T16:51:12.825,ns_1@10.242.238.88:<0.19826.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 876 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.826,ns_1@10.242.238.88:<0.19833.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 876 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.826,ns_1@10.242.238.88:<0.19834.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 876 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:info,2014-08-19T16:51:12.826,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/755 [ns_server:debug,2014-08-19T16:51:12.829,ns_1@10.242.238.88:<0.19835.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 876 into 'ns_1@10.242.238.89' is <18124.32228.0> [ns_server:debug,2014-08-19T16:51:12.832,ns_1@10.242.238.88:<0.19835.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 876 into 'ns_1@10.242.238.91' is <18126.28318.0> [rebalance:debug,2014-08-19T16:51:12.832,ns_1@10.242.238.88:<0.19826.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 876 is <0.19835.1> [ns_server:debug,2014-08-19T16:51:12.832,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/758 [ns_server:info,2014-08-19T16:51:12.833,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/758 [ns_server:debug,2014-08-19T16:51:12.841,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/748 [ns_server:info,2014-08-19T16:51:12.842,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/748 [ns_server:debug,2014-08-19T16:51:12.847,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1005 [ns_server:info,2014-08-19T16:51:12.848,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1005 [ns_server:debug,2014-08-19T16:51:12.853,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1004 
[ns_server:info,2014-08-19T16:51:12.854,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1004 [ns_server:debug,2014-08-19T16:51:12.855,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,846883}, tap_estimate, {replica_building,"default",876,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32228.0>, <<"replication_building_876_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.860,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1017 [ns_server:info,2014-08-19T16:51:12.861,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1017 [ns_server:debug,2014-08-19T16:51:12.866,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/749 [ns_server:info,2014-08-19T16:51:12.867,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/749 [ns_server:debug,2014-08-19T16:51:12.868,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,859376}, tap_estimate, {replica_building,"default",876,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28318.0>, <<"replication_building_876_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:12.868,ns_1@10.242.238.88:<0.19837.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28318.0>}, {'ns_1@10.242.238.89',<18124.32228.0>}]) [rebalance:info,2014-08-19T16:51:12.869,ns_1@10.242.238.88:<0.19826.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:12.869,ns_1@10.242.238.88:<0.19826.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 876 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:12.870,ns_1@10.242.238.88:<0.19826.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.870,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.872,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/757 [ns_server:debug,2014-08-19T16:51:12.873,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.873,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.19855.1>) [ns_server:info,2014-08-19T16:51:12.873,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/757 [ns_server:debug,2014-08-19T16:51:12.873,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 620) [ns_server:debug,2014-08-19T16:51:12.874,ns_1@10.242.238.88:<0.19856.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.874,ns_1@10.242.238.88:<0.19856.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:12.874,ns_1@10.242.238.88:<0.19855.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 620 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.874,ns_1@10.242.238.88:<0.19861.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 620 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.874,ns_1@10.242.238.88:<0.19862.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 620 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.879,ns_1@10.242.238.88:<0.19863.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 620 into 'ns_1@10.242.238.89' is <18124.32247.0> [ns_server:debug,2014-08-19T16:51:12.879,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1007 [ns_server:debug,2014-08-19T16:51:12.881,ns_1@10.242.238.88:<0.19863.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 620 into 'ns_1@10.242.238.90' is <18125.27275.0> [ns_server:info,2014-08-19T16:51:12.881,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1007 [rebalance:debug,2014-08-19T16:51:12.881,ns_1@10.242.238.88:<0.19855.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 620 is <0.19863.1> [ns_server:debug,2014-08-19T16:51:12.887,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/754 [ns_server:info,2014-08-19T16:51:12.887,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/754 [ns_server:debug,2014-08-19T16:51:12.893,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/765 [ns_server:info,2014-08-19T16:51:12.894,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/765 [ns_server:debug,2014-08-19T16:51:12.898,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/747 [ns_server:info,2014-08-19T16:51:12.899,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/747 [ns_server:debug,2014-08-19T16:51:12.905,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,896177}, tap_estimate, {replica_building,"default",620,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32247.0>, <<"replication_building_620_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.905,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/752 [ns_server:info,2014-08-19T16:51:12.906,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/752 [ns_server:debug,2014-08-19T16:51:12.911,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1018 
[ns_server:info,2014-08-19T16:51:12.912,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1018 [ns_server:debug,2014-08-19T16:51:12.917,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,908406}, tap_estimate, {replica_building,"default",620,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27275.0>, <<"replication_building_620_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.917,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1014 [ns_server:debug,2014-08-19T16:51:12.917,ns_1@10.242.238.88:<0.19865.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27275.0>}, {'ns_1@10.242.238.89',<18124.32247.0>}]) [rebalance:info,2014-08-19T16:51:12.918,ns_1@10.242.238.88:<0.19855.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:12.918,ns_1@10.242.238.88:<0.19855.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 620 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:12.918,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1014 [rebalance:info,2014-08-19T16:51:12.919,ns_1@10.242.238.88:<0.19855.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.919,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:12.922,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:12.922,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.19883.1>) [ns_server:debug,2014-08-19T16:51:12.922,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 364) [ns_server:debug,2014-08-19T16:51:12.922,ns_1@10.242.238.88:<0.19884.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.923,ns_1@10.242.238.88:<0.19884.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:12.923,ns_1@10.242.238.88:<0.19883.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 364 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.923,ns_1@10.242.238.88:<0.19889.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 364 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.923,ns_1@10.242.238.88:<0.19890.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 364 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.924,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/761 [ns_server:info,2014-08-19T16:51:12.925,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/761 [ns_server:debug,2014-08-19T16:51:12.926,ns_1@10.242.238.88:<0.19892.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 364 into 'ns_1@10.242.238.90' is <18125.27281.0> [ns_server:debug,2014-08-19T16:51:12.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1021 [ns_server:debug,2014-08-19T16:51:12.929,ns_1@10.242.238.88:<0.19892.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 364 into 'ns_1@10.242.238.89' is <18124.32252.0> [rebalance:debug,2014-08-19T16:51:12.929,ns_1@10.242.238.88:<0.19883.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 364 is <0.19892.1> [ns_server:info,2014-08-19T16:51:12.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1021 [ns_server:debug,2014-08-19T16:51:12.934,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1009 [ns_server:info,2014-08-19T16:51:12.935,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1009 [ns_server:debug,2014-08-19T16:51:12.941,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1003 [ns_server:info,2014-08-19T16:51:12.942,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1003 [ns_server:debug,2014-08-19T16:51:12.947,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1012 [ns_server:info,2014-08-19T16:51:12.948,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1012 [ns_server:debug,2014-08-19T16:51:12.953,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,943987}, tap_estimate, {replica_building,"default",364,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27281.0>, <<"replication_building_364_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:12.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0,<<>>}, {replica_building_stats,'ns_1@10.242.238.90',0,0,<<>>}]}, {move_state,620, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_620_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_620_'ns_1@10.242.238.89'">>}]}, {move_state,876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_876_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_876_'ns_1@10.242.238.89'">>}]}, {move_state,365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_365_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_365_'ns_1@10.242.238.90'">>}]}, {move_state,621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_621_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_621_'ns_1@10.242.238.89'">>}]}, {move_state,877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_877_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_877_'ns_1@10.242.238.89'">>}]}, {move_state,366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_366_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_366_'ns_1@10.242.238.90'">>}]}, {move_state,622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_622_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_622_'ns_1@10.242.238.89'">>}]}, {move_state,878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_878_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_878_'ns_1@10.242.238.89'">>}]}, {move_state,367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_367_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_367_'ns_1@10.242.238.90'">>}]}, {move_state,623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_623_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_623_'ns_1@10.242.238.89'">>}]}, {move_state,879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_879_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_879_'ns_1@10.242.238.89'">>}]}, {move_state,368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_368_'ns_1@10.242.238.89'">>}, 
{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_368_'ns_1@10.242.238.90'">>}]}, {move_state,624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_624_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_624_'ns_1@10.242.238.89'">>}]}, {move_state,880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_880_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_880_'ns_1@10.242.238.89'">>}]}, {move_state,369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_369_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_369_'ns_1@10.242.238.90'">>}]}, {move_state,625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_625_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_625_'ns_1@10.242.238.89'">>}]}, {move_state,881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_881_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_881_'ns_1@10.242.238.89'">>}]}, {move_state,370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_370_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_370_'ns_1@10.242.238.90'">>}]}, {move_state,626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_626_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_626_'ns_1@10.242.238.89'">>}]}, {move_state,882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_882_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_882_'ns_1@10.242.238.89'">>}]}, {move_state,371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_371_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_371_'ns_1@10.242.238.90'">>}]}, {move_state,627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_627_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_627_'ns_1@10.242.238.89'">>}]}, {move_state,883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_883_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_883_'ns_1@10.242.238.89'">>}]}, {move_state,372, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_372_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_372_'ns_1@10.242.238.90'">>}]}, {move_state,628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_628_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_628_'ns_1@10.242.238.89'">>}]}, {move_state,884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_884_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_884_'ns_1@10.242.238.89'">>}]}, {move_state,373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_373_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_373_'ns_1@10.242.238.90'">>}]}, {move_state,629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_629_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_629_'ns_1@10.242.238.89'">>}]}, {move_state,885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_885_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_885_'ns_1@10.242.238.89'">>}]}, {move_state,374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_374_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_374_'ns_1@10.242.238.90'">>}]}, {move_state,630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_630_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_630_'ns_1@10.242.238.89'">>}]}, {move_state,886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_886_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_886_'ns_1@10.242.238.89'">>}]}, {move_state,375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_375_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_375_'ns_1@10.242.238.90'">>}]}, {move_state,631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_631_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_631_'ns_1@10.242.238.89'">>}]}, {move_state,887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_887_'ns_1@10.242.238.91'">>}, 
{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_887_'ns_1@10.242.238.89'">>}]}, {move_state,376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_376_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_376_'ns_1@10.242.238.90'">>}]}, {move_state,632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_632_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_632_'ns_1@10.242.238.89'">>}]}, {move_state,888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_888_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_888_'ns_1@10.242.238.89'">>}]}, {move_state,377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_377_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_377_'ns_1@10.242.238.90'">>}]}, {move_state,633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_633_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_633_'ns_1@10.242.238.89'">>}]}, {move_state,889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_889_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_889_'ns_1@10.242.238.89'">>}]}, {move_state,378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_378_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_378_'ns_1@10.242.238.90'">>}]}, {move_state,634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_634_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_634_'ns_1@10.242.238.89'">>}]}, {move_state,890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_890_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_890_'ns_1@10.242.238.89'">>}]}, {move_state,379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_379_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_379_'ns_1@10.242.238.90'">>}]}, {move_state,635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_635_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_635_'ns_1@10.242.238.89'">>}]}, {move_state,891, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_891_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_891_'ns_1@10.242.238.89'">>}]}, {move_state,380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_380_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_380_'ns_1@10.242.238.90'">>}]}, {move_state,636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_636_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_636_'ns_1@10.242.238.89'">>}]}, {move_state,892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_892_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_892_'ns_1@10.242.238.89'">>}]}, {move_state,381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_381_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_381_'ns_1@10.242.238.90'">>}]}, {move_state,637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_637_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_637_'ns_1@10.242.238.89'">>}]}, {move_state,893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_893_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_893_'ns_1@10.242.238.89'">>}]}, {move_state,382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_382_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_382_'ns_1@10.242.238.90'">>}]}, {move_state,638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_638_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_638_'ns_1@10.242.238.89'">>}]}, {move_state,894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_894_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_894_'ns_1@10.242.238.89'">>}]}, {move_state,383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_383_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_383_'ns_1@10.242.238.90'">>}]}, {move_state,895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_895_'ns_1@10.242.238.91'">>}, 
{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_895_'ns_1@10.242.238.89'">>}]}, {move_state,639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_639_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_639_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:51:12.953,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1020 [ns_server:info,2014-08-19T16:51:12.954,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1020 [ns_server:debug,2014-08-19T16:51:12.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 620, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 876, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 365, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 621, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 877, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 366, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 622, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 878, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 367, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.959,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1011 [ns_server:debug,2014-08-19T16:51:12.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 623, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 879, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:info,2014-08-19T16:51:12.960,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1011 [ns_server:debug,2014-08-19T16:51:12.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 368, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 624, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got 
update_stats: 880, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 369, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 625, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 881, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 370, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 626, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.965,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1002 [ns_server:debug,2014-08-19T16:51:12.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 882, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:info,2014-08-19T16:51:12.966,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1002 [ns_server:debug,2014-08-19T16:51:12.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 371, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 627, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 883, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 372, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 628, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 884, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 373, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 629, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.971,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/756 [ns_server:debug,2014-08-19T16:51:12.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 885, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 374, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] 
[ns_server:info,2014-08-19T16:51:12.972,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/756 [ns_server:debug,2014-08-19T16:51:12.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 630, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 886, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452672,964482}, tap_estimate, {replica_building,"default",364,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32252.0>, <<"replication_building_364_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:12.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 375, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.974,ns_1@10.242.238.88:<0.19894.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32252.0>}, {'ns_1@10.242.238.90',<18125.27281.0>}]) [rebalance:info,2014-08-19T16:51:12.974,ns_1@10.242.238.88:<0.19883.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [ns_server:debug,2014-08-19T16:51:12.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 631, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:12.974,ns_1@10.242.238.88:<0.19883.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 364 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:51:12.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 887, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:12.975,ns_1@10.242.238.88:<0.19883.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:12.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 376, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.976,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:12.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 632, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 888, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 377, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.977,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/488 [ns_server:debug,2014-08-19T16:51:12.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 633, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:info,2014-08-19T16:51:12.978,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/488 [ns_server:debug,2014-08-19T16:51:12.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 889, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 378, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 634, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 890, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.980,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:12.980,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.19960.1>) [ns_server:debug,2014-08-19T16:51:12.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 875) [ns_server:debug,2014-08-19T16:51:12.980,ns_1@10.242.238.88:<0.19961.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:12.981,ns_1@10.242.238.88:<0.19961.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:12.981,ns_1@10.242.238.88:<0.19960.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 875 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:12.981,ns_1@10.242.238.88:<0.19967.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 875 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:12.981,ns_1@10.242.238.88:<0.19968.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 875 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:12.984,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/490 [ns_server:debug,2014-08-19T16:51:12.986,ns_1@10.242.238.88:<0.19973.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 875 into 'ns_1@10.242.238.89' is <18124.32259.0> [ns_server:info,2014-08-19T16:51:12.986,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/490 [ns_server:debug,2014-08-19T16:51:12.989,ns_1@10.242.238.88:<0.19973.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 875 into 'ns_1@10.242.238.91' is <18126.28338.0> [rebalance:debug,2014-08-19T16:51:12.989,ns_1@10.242.238.88:<0.19960.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 875 is <0.19973.1> [ns_server:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 379, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 635, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 891, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 380, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 636, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 892, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 381, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 637, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 893, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 382, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got 
update_stats: 638, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 894, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 383, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 895, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 639, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:12.992,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/743 [ns_server:info,2014-08-19T16:51:12.993,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/743 [ns_server:debug,2014-08-19T16:51:12.997,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/745 [ns_server:info,2014-08-19T16:51:12.998,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/745 [ns_server:debug,2014-08-19T16:51:13.003,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/744 [ns_server:info,2014-08-19T16:51:13.004,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/744 [ns_server:debug,2014-08-19T16:51:13.009,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/746 [ns_server:info,2014-08-19T16:51:13.010,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/746 [ns_server:debug,2014-08-19T16:51:13.013,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,3992}, tap_estimate, {replica_building,"default",875,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32259.0>, <<"replication_building_875_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:13.015,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/999 [ns_server:info,2014-08-19T16:51:13.016,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/999 [ns_server:debug,2014-08-19T16:51:13.021,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1001 [ns_server:info,2014-08-19T16:51:13.022,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1001 [ns_server:debug,2014-08-19T16:51:13.025,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,16634}, tap_estimate, {replica_building,"default",875,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28338.0>, <<"replication_building_875_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:13.026,ns_1@10.242.238.88:<0.19984.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28338.0>}, {'ns_1@10.242.238.89',<18124.32259.0>}]) 
[rebalance:info,2014-08-19T16:51:13.026,ns_1@10.242.238.88:<0.19960.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:13.027,ns_1@10.242.238.88:<0.19960.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 875 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:13.027,ns_1@10.242.238.88:<0.19960.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:13.027,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:13.028,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/487 [ns_server:info,2014-08-19T16:51:13.028,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/487 [ns_server:debug,2014-08-19T16:51:13.030,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:13.030,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']] (<0.20003.1>) [ns_server:debug,2014-08-19T16:51:13.030,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 619) [ns_server:debug,2014-08-19T16:51:13.031,ns_1@10.242.238.88:<0.20004.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:13.031,ns_1@10.242.238.88:<0.20004.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.90', nack} [rebalance:info,2014-08-19T16:51:13.031,ns_1@10.242.238.88:<0.20003.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 619 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.90',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:13.031,ns_1@10.242.238.88:<0.20009.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 619 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:13.031,ns_1@10.242.238.88:<0.20010.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 619 state change: {'ns_1@10.242.238.90',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:13.034,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/998 [ns_server:info,2014-08-19T16:51:13.035,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/998 [ns_server:debug,2014-08-19T16:51:13.036,ns_1@10.242.238.88:<0.20011.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 619 into 'ns_1@10.242.238.89' is <18124.32278.0> [ns_server:debug,2014-08-19T16:51:13.037,ns_1@10.242.238.88:<0.20011.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 619 into 'ns_1@10.242.238.90' is <18125.27300.0> [rebalance:debug,2014-08-19T16:51:13.037,ns_1@10.242.238.88:<0.20003.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 619 is <0.20011.1> [ns_server:debug,2014-08-19T16:51:13.040,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/489 [ns_server:info,2014-08-19T16:51:13.041,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/489 [ns_server:debug,2014-08-19T16:51:13.047,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/471 [ns_server:info,2014-08-19T16:51:13.048,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/471 [ns_server:debug,2014-08-19T16:51:13.053,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/475 [ns_server:info,2014-08-19T16:51:13.054,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/475 [ns_server:debug,2014-08-19T16:51:13.059,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/1000 [ns_server:info,2014-08-19T16:51:13.060,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/1000 [ns_server:debug,2014-08-19T16:51:13.063,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,54691}, tap_estimate, {replica_building,"default",619,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32278.0>, <<"replication_building_619_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:13.065,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/470 [ns_server:info,2014-08-19T16:51:13.066,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/470 [ns_server:debug,2014-08-19T16:51:13.071,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying 
mc_couch_events of vbucket deletion: default/479 [ns_server:info,2014-08-19T16:51:13.072,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/479 [ns_server:debug,2014-08-19T16:51:13.073,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,64561}, tap_estimate, {replica_building,"default",619,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27300.0>, <<"replication_building_619_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:13.074,ns_1@10.242.238.88:<0.20013.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.90',<18125.27300.0>}, {'ns_1@10.242.238.89',<18124.32278.0>}]) [rebalance:info,2014-08-19T16:51:13.074,ns_1@10.242.238.88:<0.20003.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.90 [rebalance:info,2014-08-19T16:51:13.074,ns_1@10.242.238.88:<0.20003.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 619 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:13.075,ns_1@10.242.238.88:<0.20003.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:13.075,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:13.077,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/473 [ns_server:info,2014-08-19T16:51:13.078,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/473 [ns_server:debug,2014-08-19T16:51:13.078,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}}] [rebalance:debug,2014-08-19T16:51:13.078,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']] (<0.20032.1>) [ns_server:debug,2014-08-19T16:51:13.078,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 363) [ns_server:debug,2014-08-19T16:51:13.079,ns_1@10.242.238.88:<0.20033.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:13.079,ns_1@10.242.238.88:<0.20033.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.89', nack} [rebalance:info,2014-08-19T16:51:13.079,ns_1@10.242.238.88:<0.20032.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 363 state change [{'ns_1@10.242.238.90',replica,undefined,undefined}, {'ns_1@10.242.238.89',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:13.079,ns_1@10.242.238.88:<0.20038.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 363 state change: {'ns_1@10.242.238.90',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:13.079,ns_1@10.242.238.88:<0.20039.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 363 state change: {'ns_1@10.242.238.89',replica,passive, undefined} [ns_server:debug,2014-08-19T16:51:13.085,ns_1@10.242.238.88:<0.20040.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 363 into 'ns_1@10.242.238.90' is <18125.27306.0> [ns_server:debug,2014-08-19T16:51:13.085,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/477 [ns_server:info,2014-08-19T16:51:13.086,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/477 [ns_server:debug,2014-08-19T16:51:13.087,ns_1@10.242.238.88:<0.20040.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 363 into 'ns_1@10.242.238.89' is <18124.32283.0> [rebalance:debug,2014-08-19T16:51:13.087,ns_1@10.242.238.88:<0.20032.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 363 is <0.20040.1> [ns_server:debug,2014-08-19T16:51:13.092,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/995 [ns_server:info,2014-08-19T16:51:13.093,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/995 [ns_server:debug,2014-08-19T16:51:13.098,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/987 [ns_server:info,2014-08-19T16:51:13.099,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/987 [ns_server:debug,2014-08-19T16:51:13.103,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/740 [ns_server:info,2014-08-19T16:51:13.104,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/740 [ns_server:debug,2014-08-19T16:51:13.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/741 [ns_server:info,2014-08-19T16:51:13.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/741 [ns_server:debug,2014-08-19T16:51:13.111,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,102410}, tap_estimate, {replica_building,"default",363,'ns_1@10.242.238.88', 'ns_1@10.242.238.90'}, 0,<18125.27306.0>, <<"replication_building_363_'ns_1@10.242.238.90'">>} [ns_server:debug,2014-08-19T16:51:13.116,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/981 [ns_server:info,2014-08-19T16:51:13.117,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/981 [ns_server:debug,2014-08-19T16:51:13.124,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying 
mc_couch_events of vbucket deletion: default/992 [ns_server:info,2014-08-19T16:51:13.125,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/992 [ns_server:debug,2014-08-19T16:51:13.125,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,116578}, tap_estimate, {replica_building,"default",363,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32283.0>, <<"replication_building_363_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:13.126,ns_1@10.242.238.88:<0.20042.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.89',<18124.32283.0>}, {'ns_1@10.242.238.90',<18125.27306.0>}]) [rebalance:info,2014-08-19T16:51:13.126,ns_1@10.242.238.88:<0.20032.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.89 [rebalance:info,2014-08-19T16:51:13.126,ns_1@10.242.238.88:<0.20032.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 363 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:13.127,ns_1@10.242.238.88:<0.20032.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:13.127,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}} [ns_server:debug,2014-08-19T16:51:13.130,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [{move,{874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}}] [rebalance:debug,2014-08-19T16:51:13.130,ns_1@10.242.238.88:<0.25746.0>:ns_single_vbucket_mover:spawn_mover:28]Spawned single vbucket mover: [<0.25746.0>,'ns_1@10.242.238.88',"default",874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']] (<0.20060.1>) [ns_server:debug,2014-08-19T16:51:13.130,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_start:253]Noted vbucket move start (vbucket 874) [ns_server:debug,2014-08-19T16:51:13.131,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/980 [ns_server:debug,2014-08-19T16:51:13.131,ns_1@10.242.238.88:<0.20061.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. Thats normal: {'ns_1@10.242.238.88', nack} [ns_server:debug,2014-08-19T16:51:13.131,ns_1@10.242.238.88:<0.20061.1>:ns_single_vbucket_mover:mover_inner:141]Got nack for inhibited_view_compaction. 
Thats normal: {'ns_1@10.242.238.91', nack} [rebalance:info,2014-08-19T16:51:13.131,ns_1@10.242.238.88:<0.20060.1>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 874 state change [{'ns_1@10.242.238.89',replica,undefined,undefined}, {'ns_1@10.242.238.91',replica,passive,undefined}] [rebalance:info,2014-08-19T16:51:13.131,ns_1@10.242.238.88:<0.20067.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 874 state change: {'ns_1@10.242.238.89',replica,undefined, undefined} [rebalance:info,2014-08-19T16:51:13.131,ns_1@10.242.238.88:<0.20068.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 874 state change: {'ns_1@10.242.238.91',replica,passive, undefined} [ns_server:info,2014-08-19T16:51:13.131,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/980 [ns_server:debug,2014-08-19T16:51:13.138,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/988 [ns_server:debug,2014-08-19T16:51:13.139,ns_1@10.242.238.88:<0.20069.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 874 into 'ns_1@10.242.238.89' is <18124.32289.0> [ns_server:info,2014-08-19T16:51:13.139,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/988 [ns_server:debug,2014-08-19T16:51:13.141,ns_1@10.242.238.88:<0.20069.1>:ns_replicas_builder_utils:spawn_replica_builder:88]Replica building ebucketmigrator for vbucket 874 into 'ns_1@10.242.238.91' is <18126.28358.0> [rebalance:debug,2014-08-19T16:51:13.141,ns_1@10.242.238.88:<0.20060.1>:ns_single_vbucket_mover:mover_inner:170]child replicas builder for vbucket 874 is <0.20069.1> [ns_server:debug,2014-08-19T16:51:13.145,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/983 [ns_server:info,2014-08-19T16:51:13.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/983 [ns_server:debug,2014-08-19T16:51:13.151,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/982 [ns_server:info,2014-08-19T16:51:13.152,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/982 [ns_server:debug,2014-08-19T16:51:13.157,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/991 [ns_server:info,2014-08-19T16:51:13.158,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/991 [ns_server:debug,2014-08-19T16:51:13.163,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/984 [ns_server:info,2014-08-19T16:51:13.163,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/984 [ns_server:debug,2014-08-19T16:51:13.165,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,156852}, tap_estimate, {replica_building,"default",874,'ns_1@10.242.238.88', 'ns_1@10.242.238.89'}, 0,<18124.32289.0>, <<"replication_building_874_'ns_1@10.242.238.89'">>} [ns_server:debug,2014-08-19T16:51:13.168,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/989 [ns_server:info,2014-08-19T16:51:13.169,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: 
default/989 [ns_server:debug,2014-08-19T16:51:13.174,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/993 [ns_server:info,2014-08-19T16:51:13.174,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/993 [ns_server:debug,2014-08-19T16:51:13.179,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/997 [ns_server:info,2014-08-19T16:51:13.179,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/997 [ns_server:debug,2014-08-19T16:51:13.181,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_tap_estimate:258]Seeing tap_estimate: {{1408,452673,172178}, tap_estimate, {replica_building,"default",874,'ns_1@10.242.238.88', 'ns_1@10.242.238.91'}, 0,<18126.28358.0>, <<"replication_building_874_'ns_1@10.242.238.91'">>} [ns_server:debug,2014-08-19T16:51:13.181,ns_1@10.242.238.88:<0.20071.1>:ns_single_vbucket_mover:wait_backfill_determination:99]Had backfill rvs: [true,true]([{'ns_1@10.242.238.91',<18126.28358.0>}, {'ns_1@10.242.238.89',<18124.32289.0>}]) [rebalance:info,2014-08-19T16:51:13.181,ns_1@10.242.238.88:<0.20060.1>:janitor_agent:initiate_indexing:552]default: Doing initiate_indexing call for ns_1@10.242.238.91 [rebalance:info,2014-08-19T16:51:13.182,ns_1@10.242.238.88:<0.20060.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 874 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:13.183,ns_1@10.242.238.88:<0.20060.1>:ns_single_vbucket_mover:mover_inner:184]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:13.183,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_backfill_done:276]noted backfill done: {move,{874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}} [ns_server:debug,2014-08-19T16:51:13.184,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:51:13.185,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/996 [ns_server:info,2014-08-19T16:51:13.186,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/996 [ns_server:debug,2014-08-19T16:51:13.191,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/994 [ns_server:info,2014-08-19T16:51:13.192,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/994 [ns_server:debug,2014-08-19T16:51:13.198,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/990 [ns_server:info,2014-08-19T16:51:13.199,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/990 [ns_server:debug,2014-08-19T16:51:13.204,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/486 [ns_server:info,2014-08-19T16:51:13.205,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/486 [ns_server:debug,2014-08-19T16:51:13.211,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/736 
[ns_server:info,2014-08-19T16:51:13.212,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/736 [ns_server:debug,2014-08-19T16:51:13.217,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/729 [ns_server:info,2014-08-19T16:51:13.218,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/729 [ns_server:debug,2014-08-19T16:51:13.223,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/730 [ns_server:info,2014-08-19T16:51:13.224,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/730 [ns_server:debug,2014-08-19T16:51:13.230,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/481 [ns_server:info,2014-08-19T16:51:13.231,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/481 [ns_server:debug,2014-08-19T16:51:13.236,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/742 [ns_server:info,2014-08-19T16:51:13.237,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/742 [ns_server:debug,2014-08-19T16:51:13.242,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/478 [ns_server:info,2014-08-19T16:51:13.243,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/478 [ns_server:debug,2014-08-19T16:51:13.250,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/480 [ns_server:info,2014-08-19T16:51:13.251,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/480 [ns_server:debug,2014-08-19T16:51:13.255,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/476 [ns_server:info,2014-08-19T16:51:13.256,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/476 [ns_server:debug,2014-08-19T16:51:13.261,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/986 [ns_server:info,2014-08-19T16:51:13.262,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/986 [ns_server:debug,2014-08-19T16:51:13.268,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/731 [ns_server:info,2014-08-19T16:51:13.269,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/731 [ns_server:debug,2014-08-19T16:51:13.274,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/737 [ns_server:info,2014-08-19T16:51:13.275,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/737 [ns_server:debug,2014-08-19T16:51:13.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/985 [ns_server:info,2014-08-19T16:51:13.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/985 
[ns_server:debug,2014-08-19T16:51:13.286,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/738 [ns_server:info,2014-08-19T16:51:13.287,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/738 [ns_server:debug,2014-08-19T16:51:13.292,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/733 [ns_server:info,2014-08-19T16:51:13.293,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/733 [ns_server:debug,2014-08-19T16:51:13.297,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/728 [ns_server:info,2014-08-19T16:51:13.298,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/728 [ns_server:debug,2014-08-19T16:51:13.303,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/484 [ns_server:info,2014-08-19T16:51:13.304,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/484 [ns_server:debug,2014-08-19T16:51:13.309,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/727 [ns_server:info,2014-08-19T16:51:13.310,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/727 [ns_server:debug,2014-08-19T16:51:13.314,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/735 [ns_server:info,2014-08-19T16:51:13.315,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/735 [ns_server:debug,2014-08-19T16:51:13.321,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/472 [ns_server:info,2014-08-19T16:51:13.322,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/472 [ns_server:debug,2014-08-19T16:51:13.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/739 [ns_server:info,2014-08-19T16:51:13.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/739 [ns_server:debug,2014-08-19T16:51:13.333,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/482 [ns_server:info,2014-08-19T16:51:13.334,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/482 [ns_server:debug,2014-08-19T16:51:13.339,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/734 [ns_server:info,2014-08-19T16:51:13.340,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/734 [ns_server:debug,2014-08-19T16:51:13.345,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/732 [ns_server:info,2014-08-19T16:51:13.346,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/732 [ns_server:debug,2014-08-19T16:51:13.352,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/483 
[ns_server:info,2014-08-19T16:51:13.353,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/483 [ns_server:debug,2014-08-19T16:51:13.358,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/726 [ns_server:info,2014-08-19T16:51:13.359,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/726 [ns_server:debug,2014-08-19T16:51:13.364,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/485 [ns_server:info,2014-08-19T16:51:13.364,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/485 [ns_server:debug,2014-08-19T16:51:13.369,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/474 [ns_server:info,2014-08-19T16:51:13.370,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/474 [ns_server:debug,2014-08-19T16:51:13.376,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/464 [ns_server:info,2014-08-19T16:51:13.376,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/464 [ns_server:debug,2014-08-19T16:51:13.382,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/466 [ns_server:info,2014-08-19T16:51:13.382,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/466 [ns_server:debug,2014-08-19T16:51:13.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/465 [ns_server:info,2014-08-19T16:51:13.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/465 [ns_server:debug,2014-08-19T16:51:13.393,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/467 [ns_server:info,2014-08-19T16:51:13.394,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/467 [ns_server:debug,2014-08-19T16:51:13.398,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/469 [ns_server:info,2014-08-19T16:51:13.399,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/469 [ns_server:debug,2014-08-19T16:51:13.404,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/468 [ns_server:info,2014-08-19T16:51:13.405,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/468 [ns_server:debug,2014-08-19T16:51:13.410,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/718 [ns_server:info,2014-08-19T16:51:13.411,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/718 [ns_server:debug,2014-08-19T16:51:13.415,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/720 [ns_server:info,2014-08-19T16:51:13.416,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/720 
[ns_server:debug,2014-08-19T16:51:13.421,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/723 [ns_server:info,2014-08-19T16:51:13.422,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/723 [ns_server:debug,2014-08-19T16:51:13.427,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/719 [ns_server:info,2014-08-19T16:51:13.428,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/719 [ns_server:debug,2014-08-19T16:51:13.433,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/721 [ns_server:info,2014-08-19T16:51:13.434,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/721 [ns_server:debug,2014-08-19T16:51:13.438,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/722 [ns_server:info,2014-08-19T16:51:13.439,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/722 [ns_server:debug,2014-08-19T16:51:13.444,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/724 [ns_server:info,2014-08-19T16:51:13.445,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/724 [ns_server:debug,2014-08-19T16:51:13.450,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/725 [ns_server:info,2014-08-19T16:51:13.451,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/725 [ns_server:debug,2014-08-19T16:51:13.456,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/972 [ns_server:info,2014-08-19T16:51:13.456,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/972 [ns_server:debug,2014-08-19T16:51:13.461,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/974 [ns_server:info,2014-08-19T16:51:13.462,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/974 [ns_server:debug,2014-08-19T16:51:13.467,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/976 [ns_server:info,2014-08-19T16:51:13.468,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/976 [ns_server:debug,2014-08-19T16:51:13.472,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/973 [ns_server:info,2014-08-19T16:51:13.473,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/973 [ns_server:debug,2014-08-19T16:51:13.478,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/975 [ns_server:info,2014-08-19T16:51:13.479,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/975 [ns_server:debug,2014-08-19T16:51:13.484,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/978 
[ns_server:info,2014-08-19T16:51:13.485,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/978 [ns_server:debug,2014-08-19T16:51:13.489,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/977 [ns_server:info,2014-08-19T16:51:13.490,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/977 [ns_server:debug,2014-08-19T16:51:13.495,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/462 [ns_server:info,2014-08-19T16:51:13.496,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/462 [ns_server:debug,2014-08-19T16:51:13.501,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/452 [ns_server:info,2014-08-19T16:51:13.502,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/452 [ns_server:debug,2014-08-19T16:51:13.507,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/448 [ns_server:info,2014-08-19T16:51:13.508,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/448 [ns_server:debug,2014-08-19T16:51:13.513,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/463 [ns_server:info,2014-08-19T16:51:13.514,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/463 [ns_server:debug,2014-08-19T16:51:13.518,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/450 [ns_server:info,2014-08-19T16:51:13.519,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/450 [ns_server:debug,2014-08-19T16:51:13.524,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/708 [ns_server:info,2014-08-19T16:51:13.525,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/708 [ns_server:debug,2014-08-19T16:51:13.530,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/461 [ns_server:info,2014-08-19T16:51:13.531,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/461 [ns_server:debug,2014-08-19T16:51:13.536,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/713 [ns_server:info,2014-08-19T16:51:13.538,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/713 [ns_server:debug,2014-08-19T16:51:13.542,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/455 [ns_server:info,2014-08-19T16:51:13.543,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/455 [ns_server:debug,2014-08-19T16:51:13.549,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/966 [ns_server:info,2014-08-19T16:51:13.550,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/966 
[ns_server:debug,2014-08-19T16:51:13.554,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/460 [ns_server:info,2014-08-19T16:51:13.555,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/460 [ns_server:debug,2014-08-19T16:51:13.559,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/707 [ns_server:info,2014-08-19T16:51:13.560,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/707 [ns_server:debug,2014-08-19T16:51:13.565,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/459 [ns_server:info,2014-08-19T16:51:13.565,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/459 [ns_server:debug,2014-08-19T16:51:13.570,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/962 [ns_server:info,2014-08-19T16:51:13.571,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/962 [ns_server:debug,2014-08-19T16:51:13.576,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/457 [ns_server:info,2014-08-19T16:51:13.577,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/457 [ns_server:debug,2014-08-19T16:51:13.581,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/710 [ns_server:info,2014-08-19T16:51:13.582,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/710 [ns_server:debug,2014-08-19T16:51:13.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/704 [ns_server:info,2014-08-19T16:51:13.588,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/704 [ns_server:debug,2014-08-19T16:51:13.592,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/709 [ns_server:info,2014-08-19T16:51:13.593,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/709 [ns_server:debug,2014-08-19T16:51:13.598,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/453 [ns_server:info,2014-08-19T16:51:13.599,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/453 [ns_server:debug,2014-08-19T16:51:13.603,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/454 [ns_server:info,2014-08-19T16:51:13.604,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/454 [ns_server:debug,2014-08-19T16:51:13.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/961 [ns_server:info,2014-08-19T16:51:13.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/961 [ns_server:debug,2014-08-19T16:51:13.614,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/979 
[ns_server:info,2014-08-19T16:51:13.615,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/979 [ns_server:debug,2014-08-19T16:51:13.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/963 [ns_server:info,2014-08-19T16:51:13.619,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/963 [ns_server:debug,2014-08-19T16:51:13.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/451 [ns_server:info,2014-08-19T16:51:13.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/451 [ns_server:debug,2014-08-19T16:51:13.628,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/960 [ns_server:info,2014-08-19T16:51:13.629,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/960 [ns_server:debug,2014-08-19T16:51:13.632,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/715 [ns_server:info,2014-08-19T16:51:13.633,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/715 [ns_server:debug,2014-08-19T16:51:13.636,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/705 [ns_server:info,2014-08-19T16:51:13.637,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/705 [ns_server:debug,2014-08-19T16:51:13.640,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/714 [ns_server:info,2014-08-19T16:51:13.641,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/714 [ns_server:debug,2014-08-19T16:51:13.644,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/968 [ns_server:info,2014-08-19T16:51:13.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/968 [ns_server:debug,2014-08-19T16:51:13.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/964 [ns_server:info,2014-08-19T16:51:13.650,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/964 [ns_server:debug,2014-08-19T16:51:13.653,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/716 [ns_server:info,2014-08-19T16:51:13.654,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/716 [ns_server:debug,2014-08-19T16:51:13.657,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/706 [ns_server:info,2014-08-19T16:51:13.658,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/706 [ns_server:debug,2014-08-19T16:51:13.662,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/717 [ns_server:info,2014-08-19T16:51:13.662,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/717 
[ns_server:debug,2014-08-19T16:51:13.667,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/969 [ns_server:info,2014-08-19T16:51:13.668,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/969 [ns_server:debug,2014-08-19T16:51:13.671,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/458 [ns_server:info,2014-08-19T16:51:13.672,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/458 [ns_server:debug,2014-08-19T16:51:13.675,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/970 [ns_server:info,2014-08-19T16:51:13.676,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/970 [ns_server:debug,2014-08-19T16:51:13.681,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/456 [ns_server:info,2014-08-19T16:51:13.682,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/456 [ns_server:debug,2014-08-19T16:51:13.687,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/967 [ns_server:info,2014-08-19T16:51:13.687,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/967 [ns_server:debug,2014-08-19T16:51:13.691,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/712 [ns_server:info,2014-08-19T16:51:13.692,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/712 [ns_server:debug,2014-08-19T16:51:13.695,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/965 [ns_server:info,2014-08-19T16:51:13.696,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/965 [ns_server:debug,2014-08-19T16:51:13.699,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/711 [ns_server:info,2014-08-19T16:51:13.699,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/711 [ns_server:debug,2014-08-19T16:51:13.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/449 [ns_server:info,2014-08-19T16:51:13.703,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/449 [ns_server:debug,2014-08-19T16:51:13.706,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/971 [ns_server:info,2014-08-19T16:51:13.707,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/971 [ns_server:debug,2014-08-19T16:51:13.712,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/440 [ns_server:info,2014-08-19T16:51:13.713,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/440 [ns_server:debug,2014-08-19T16:51:13.717,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/442 
[ns_server:info,2014-08-19T16:51:13.717,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/442 [ns_server:debug,2014-08-19T16:51:13.722,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/447 [ns_server:info,2014-08-19T16:51:13.723,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/447 [ns_server:debug,2014-08-19T16:51:13.726,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/446 [ns_server:info,2014-08-19T16:51:13.727,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/446 [ns_server:debug,2014-08-19T16:51:13.731,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/445 [ns_server:info,2014-08-19T16:51:13.732,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/445 [ns_server:debug,2014-08-19T16:51:13.737,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/444 [ns_server:info,2014-08-19T16:51:13.738,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/444 [ns_server:debug,2014-08-19T16:51:13.743,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/443 [ns_server:info,2014-08-19T16:51:13.744,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/443 [ns_server:debug,2014-08-19T16:51:13.748,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/696 [ns_server:info,2014-08-19T16:51:13.749,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/696 [ns_server:debug,2014-08-19T16:51:13.754,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/698 [ns_server:info,2014-08-19T16:51:13.755,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/698 [ns_server:debug,2014-08-19T16:51:13.760,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/702 [ns_server:info,2014-08-19T16:51:13.760,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/702 [ns_server:debug,2014-08-19T16:51:13.765,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/700 [ns_server:info,2014-08-19T16:51:13.766,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/700 [ns_server:debug,2014-08-19T16:51:13.771,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/701 [ns_server:info,2014-08-19T16:51:13.772,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/701 [ns_server:debug,2014-08-19T16:51:13.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/699 [ns_server:info,2014-08-19T16:51:13.778,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/699 
[ns_server:debug,2014-08-19T16:51:13.783,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/697 [ns_server:info,2014-08-19T16:51:13.783,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/697 [ns_server:debug,2014-08-19T16:51:13.788,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/703 [ns_server:info,2014-08-19T16:51:13.789,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/703 [ns_server:debug,2014-08-19T16:51:13.793,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/952 [ns_server:info,2014-08-19T16:51:13.794,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/952 [ns_server:debug,2014-08-19T16:51:13.799,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/954 [ns_server:info,2014-08-19T16:51:13.799,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/954 [ns_server:debug,2014-08-19T16:51:13.804,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/953 [ns_server:info,2014-08-19T16:51:13.805,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/953 [ns_server:debug,2014-08-19T16:51:13.810,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/956 [ns_server:info,2014-08-19T16:51:13.811,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/956 [ns_server:debug,2014-08-19T16:51:13.816,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/958 [ns_server:info,2014-08-19T16:51:13.817,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/958 [ns_server:debug,2014-08-19T16:51:13.822,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/955 [ns_server:info,2014-08-19T16:51:13.823,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/955 [ns_server:debug,2014-08-19T16:51:13.828,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/957 [ns_server:info,2014-08-19T16:51:13.829,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/957 [ns_server:debug,2014-08-19T16:51:13.833,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/959 [ns_server:info,2014-08-19T16:51:13.834,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/959 [ns_server:debug,2014-08-19T16:51:13.839,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/428 [ns_server:info,2014-08-19T16:51:13.840,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/428 [ns_server:debug,2014-08-19T16:51:13.845,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/684 
[ns_server:info,2014-08-19T16:51:13.846,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/684 [ns_server:debug,2014-08-19T16:51:13.850,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/686 [ns_server:info,2014-08-19T16:51:13.851,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/686 [ns_server:debug,2014-08-19T16:51:13.856,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/441 [ns_server:info,2014-08-19T16:51:13.856,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/441 [ns_server:debug,2014-08-19T16:51:13.861,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/438 [ns_server:info,2014-08-19T16:51:13.862,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/438 [ns_server:debug,2014-08-19T16:51:13.867,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/432 [ns_server:info,2014-08-19T16:51:13.868,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/432 [ns_server:debug,2014-08-19T16:51:13.872,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/434 [ns_server:info,2014-08-19T16:51:13.873,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/434 [ns_server:debug,2014-08-19T16:51:13.879,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/950 [ns_server:info,2014-08-19T16:51:13.880,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/950 [ns_server:debug,2014-08-19T16:51:13.884,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/949 [ns_server:info,2014-08-19T16:51:13.885,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/949 [ns_server:debug,2014-08-19T16:51:13.889,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/948 [ns_server:info,2014-08-19T16:51:13.890,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/948 [ns_server:debug,2014-08-19T16:51:13.895,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/439 [ns_server:info,2014-08-19T16:51:13.896,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/439 [ns_server:debug,2014-08-19T16:51:13.901,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/945 [ns_server:info,2014-08-19T16:51:13.902,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/945 [ns_server:debug,2014-08-19T16:51:13.906,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/942 [ns_server:info,2014-08-19T16:51:13.907,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/942 
[ns_server:debug,2014-08-19T16:51:13.911,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/691 [ns_server:info,2014-08-19T16:51:13.912,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/691 [ns_server:debug,2014-08-19T16:51:13.917,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/685 [ns_server:info,2014-08-19T16:51:13.918,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/685 [ns_server:debug,2014-08-19T16:51:13.924,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/938 [ns_server:info,2014-08-19T16:51:13.925,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/938 [ns_server:debug,2014-08-19T16:51:13.929,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/692 [ns_server:info,2014-08-19T16:51:13.930,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/692 [ns_server:debug,2014-08-19T16:51:13.935,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/688 [ns_server:info,2014-08-19T16:51:13.936,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/688 [ns_server:debug,2014-08-19T16:51:13.940,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/951 [ns_server:info,2014-08-19T16:51:13.941,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/951 [ns_server:debug,2014-08-19T16:51:13.945,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/940 [ns_server:info,2014-08-19T16:51:13.946,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/940 [ns_server:debug,2014-08-19T16:51:13.951,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/946 [ns_server:info,2014-08-19T16:51:13.952,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/946 [ns_server:debug,2014-08-19T16:51:13.957,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/435 [ns_server:info,2014-08-19T16:51:13.958,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/435 [ns_server:debug,2014-08-19T16:51:13.962,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/693 [ns_server:info,2014-08-19T16:51:13.963,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/693 [ns_server:debug,2014-08-19T16:51:13.968,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/941 [ns_server:info,2014-08-19T16:51:13.969,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/941 [ns_server:debug,2014-08-19T16:51:13.974,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/683 
[ns_server:info,2014-08-19T16:51:13.974,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/683 [ns_server:debug,2014-08-19T16:51:13.979,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/944 [ns_server:info,2014-08-19T16:51:13.980,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/944 [ns_server:debug,2014-08-19T16:51:13.984,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/695 [ns_server:info,2014-08-19T16:51:13.985,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/695 [ns_server:debug,2014-08-19T16:51:13.991,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/939 [ns_server:info,2014-08-19T16:51:13.991,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/939 [ns_server:debug,2014-08-19T16:51:13.996,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/429 [ns_server:info,2014-08-19T16:51:13.997,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/429 [ns_server:debug,2014-08-19T16:51:14.001,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/436 [ns_server:info,2014-08-19T16:51:14.002,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/436 [ns_server:debug,2014-08-19T16:51:14.006,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/689 [ns_server:info,2014-08-19T16:51:14.007,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/689 [ns_server:debug,2014-08-19T16:51:14.012,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/694 [ns_server:info,2014-08-19T16:51:14.013,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/694 [ns_server:debug,2014-08-19T16:51:14.017,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/430 [ns_server:info,2014-08-19T16:51:14.018,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/430 [ns_server:debug,2014-08-19T16:51:14.022,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/437 [ns_server:info,2014-08-19T16:51:14.023,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/437 [ns_server:debug,2014-08-19T16:51:14.027,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/433 [ns_server:info,2014-08-19T16:51:14.028,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/433 [ns_server:debug,2014-08-19T16:51:14.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/690 [ns_server:info,2014-08-19T16:51:14.034,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/690 
[ns_server:debug,2014-08-19T16:51:14.038,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/943 [ns_server:info,2014-08-19T16:51:14.039,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/943 [ns_server:debug,2014-08-19T16:51:14.044,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/947 [ns_server:info,2014-08-19T16:51:14.045,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/947 [ns_server:debug,2014-08-19T16:51:14.049,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/431 [ns_server:info,2014-08-19T16:51:14.050,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/431 [ns_server:debug,2014-08-19T16:51:14.055,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/427 [ns_server:info,2014-08-19T16:51:14.056,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/427 [ns_server:debug,2014-08-19T16:51:14.060,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/687 [ns_server:info,2014-08-19T16:51:14.061,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/687 [ns_server:debug,2014-08-19T16:51:14.066,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/682 [ns_server:info,2014-08-19T16:51:14.067,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/682 [ns_server:debug,2014-08-19T16:51:14.071,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/426 [ns_server:info,2014-08-19T16:51:14.072,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/426 [ns_server:debug,2014-08-19T16:51:14.076,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/937 [ns_server:info,2014-08-19T16:51:14.077,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/937 [ns_server:debug,2014-08-19T16:51:14.082,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/936 [ns_server:info,2014-08-19T16:51:14.082,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/936 [ns_server:debug,2014-08-19T16:51:14.087,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/663 [ns_server:info,2014-08-19T16:51:14.088,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/663 [ns_server:debug,2014-08-19T16:51:14.093,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/669 [ns_server:info,2014-08-19T16:51:14.093,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/669 [ns_server:debug,2014-08-19T16:51:14.098,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/665 
[ns_server:info,2014-08-19T16:51:14.099,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/665 [ns_server:debug,2014-08-19T16:51:14.103,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/667 [ns_server:info,2014-08-19T16:51:14.104,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/667 [ns_server:debug,2014-08-19T16:51:14.108,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/662 [ns_server:info,2014-08-19T16:51:14.109,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/662 [ns_server:debug,2014-08-19T16:51:14.113,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/664 [ns_server:info,2014-08-19T16:51:14.114,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/664 [ns_server:debug,2014-08-19T16:51:14.119,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/671 [ns_server:info,2014-08-19T16:51:14.120,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/671 [ns_server:debug,2014-08-19T16:51:14.124,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/675 [ns_server:info,2014-08-19T16:51:14.125,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/675 [ns_server:debug,2014-08-19T16:51:14.129,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/674 [ns_server:info,2014-08-19T16:51:14.130,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/674 [ns_server:debug,2014-08-19T16:51:14.134,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/676 [ns_server:info,2014-08-19T16:51:14.135,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/676 [ns_server:debug,2014-08-19T16:51:14.139,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/668 [ns_server:info,2014-08-19T16:51:14.140,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/668 [ns_server:debug,2014-08-19T16:51:14.144,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/926 [ns_server:info,2014-08-19T16:51:14.145,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/926 [ns_server:debug,2014-08-19T16:51:14.149,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/933 [ns_server:info,2014-08-19T16:51:14.150,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/933 [ns_server:debug,2014-08-19T16:51:14.154,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/410 [ns_server:info,2014-08-19T16:51:14.155,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/410 
[ns_server:debug,2014-08-19T16:51:14.160,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/414 [ns_server:info,2014-08-19T16:51:14.161,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/414 [ns_server:debug,2014-08-19T16:51:14.165,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/407 [ns_server:info,2014-08-19T16:51:14.165,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/407 [ns_server:debug,2014-08-19T16:51:14.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/415 [ns_server:info,2014-08-19T16:51:14.170,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/415 [ns_server:debug,2014-08-19T16:51:14.174,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/930 [ns_server:info,2014-08-19T16:51:14.175,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/930 [ns_server:debug,2014-08-19T16:51:14.179,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/411 [ns_server:info,2014-08-19T16:51:14.180,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/411 [ns_server:debug,2014-08-19T16:51:14.185,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/922 [ns_server:info,2014-08-19T16:51:14.186,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/922 [ns_server:debug,2014-08-19T16:51:14.190,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/424 [ns_server:info,2014-08-19T16:51:14.191,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/424 [ns_server:debug,2014-08-19T16:51:14.195,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/419 [ns_server:info,2014-08-19T16:51:14.196,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/419 [ns_server:debug,2014-08-19T16:51:14.200,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/423 [ns_server:info,2014-08-19T16:51:14.201,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/423 [ns_server:debug,2014-08-19T16:51:14.205,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/924 [ns_server:info,2014-08-19T16:51:14.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/924 [ns_server:debug,2014-08-19T16:51:14.210,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/418 [ns_server:info,2014-08-19T16:51:14.211,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/418 [ns_server:debug,2014-08-19T16:51:14.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/425 
[ns_server:info,2014-08-19T16:51:14.215,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/425 [ns_server:debug,2014-08-19T16:51:14.220,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/409 [ns_server:info,2014-08-19T16:51:14.221,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/409 [ns_server:debug,2014-08-19T16:51:14.225,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/416 [ns_server:info,2014-08-19T16:51:14.226,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/416 [ns_server:debug,2014-08-19T16:51:14.230,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/421 [ns_server:info,2014-08-19T16:51:14.231,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/421 [ns_server:debug,2014-08-19T16:51:14.235,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/422 [ns_server:info,2014-08-19T16:51:14.235,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/422 [ns_server:debug,2014-08-19T16:51:14.240,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/928 [ns_server:info,2014-08-19T16:51:14.240,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/928 [ns_server:debug,2014-08-19T16:51:14.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/934 [ns_server:info,2014-08-19T16:51:14.245,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/934 [ns_server:debug,2014-08-19T16:51:14.250,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/413 [ns_server:info,2014-08-19T16:51:14.250,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/413 [ns_server:debug,2014-08-19T16:51:14.254,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/408 [ns_server:info,2014-08-19T16:51:14.255,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/408 [ns_server:debug,2014-08-19T16:51:14.259,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/917 [ns_server:info,2014-08-19T16:51:14.260,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/917 [ns_server:debug,2014-08-19T16:51:14.264,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/680 [ns_server:info,2014-08-19T16:51:14.265,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/680 [ns_server:debug,2014-08-19T16:51:14.269,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/412 [ns_server:info,2014-08-19T16:51:14.270,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/412 
[ns_server:debug,2014-08-19T16:51:14.274,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/406 [ns_server:info,2014-08-19T16:51:14.275,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/406 [ns_server:debug,2014-08-19T16:51:14.279,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/679 [ns_server:info,2014-08-19T16:51:14.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/679 [ns_server:debug,2014-08-19T16:51:14.285,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/935 [ns_server:info,2014-08-19T16:51:14.286,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/935 [ns_server:debug,2014-08-19T16:51:14.291,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/927 [ns_server:info,2014-08-19T16:51:14.292,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/927 [ns_server:debug,2014-08-19T16:51:14.296,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/925 [ns_server:info,2014-08-19T16:51:14.297,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/925 [ns_server:debug,2014-08-19T16:51:14.301,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/918 [ns_server:info,2014-08-19T16:51:14.302,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/918 [ns_server:debug,2014-08-19T16:51:14.306,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/920 [ns_server:info,2014-08-19T16:51:14.307,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/920 [ns_server:debug,2014-08-19T16:51:14.311,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/681 [ns_server:info,2014-08-19T16:51:14.312,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/681 [ns_server:debug,2014-08-19T16:51:14.317,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/420 [ns_server:info,2014-08-19T16:51:14.318,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/420 [ns_server:debug,2014-08-19T16:51:14.323,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/673 [ns_server:info,2014-08-19T16:51:14.324,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/673 [ns_server:debug,2014-08-19T16:51:14.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/929 [ns_server:info,2014-08-19T16:51:14.329,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/929 [ns_server:debug,2014-08-19T16:51:14.333,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/921 
[ns_server:info,2014-08-19T16:51:14.334,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/921 [ns_server:debug,2014-08-19T16:51:14.338,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/417 [ns_server:info,2014-08-19T16:51:14.339,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/417 [ns_server:debug,2014-08-19T16:51:14.343,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/677 [ns_server:info,2014-08-19T16:51:14.344,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/677 [ns_server:debug,2014-08-19T16:51:14.348,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/923 [ns_server:info,2014-08-19T16:51:14.349,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/923 [ns_server:debug,2014-08-19T16:51:14.353,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/916 [ns_server:info,2014-08-19T16:51:14.354,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/916 [ns_server:debug,2014-08-19T16:51:14.358,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/670 [ns_server:info,2014-08-19T16:51:14.359,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/670 [ns_server:debug,2014-08-19T16:51:14.363,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/932 [ns_server:info,2014-08-19T16:51:14.364,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/932 [ns_server:debug,2014-08-19T16:51:14.367,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/672 [ns_server:info,2014-08-19T16:51:14.368,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/672 [ns_server:debug,2014-08-19T16:51:14.372,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/666 [ns_server:info,2014-08-19T16:51:14.373,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/666 [ns_server:debug,2014-08-19T16:51:14.377,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/931 [ns_server:info,2014-08-19T16:51:14.378,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/931 [ns_server:debug,2014-08-19T16:51:14.382,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/919 [ns_server:info,2014-08-19T16:51:14.383,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/919 [ns_server:debug,2014-08-19T16:51:14.387,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/678 [ns_server:info,2014-08-19T16:51:14.388,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/678 
[ns_server:debug,2014-08-19T16:51:14.391,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/396 [ns_server:info,2014-08-19T16:51:14.392,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/396 [ns_server:debug,2014-08-19T16:51:14.395,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/399 [ns_server:info,2014-08-19T16:51:14.396,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/399 [ns_server:debug,2014-08-19T16:51:14.399,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/401 [ns_server:info,2014-08-19T16:51:14.399,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/401 [ns_server:debug,2014-08-19T16:51:14.402,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/400 [ns_server:info,2014-08-19T16:51:14.403,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/400 [ns_server:debug,2014-08-19T16:51:14.406,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/398 [ns_server:info,2014-08-19T16:51:14.407,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/398 [ns_server:debug,2014-08-19T16:51:14.409,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/402 [ns_server:info,2014-08-19T16:51:14.410,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/402 [ns_server:debug,2014-08-19T16:51:14.414,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/403 [ns_server:info,2014-08-19T16:51:14.415,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/403 [ns_server:debug,2014-08-19T16:51:14.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/405 [ns_server:info,2014-08-19T16:51:14.420,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/405 [ns_server:debug,2014-08-19T16:51:14.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/404 [ns_server:info,2014-08-19T16:51:14.424,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/404 [ns_server:debug,2014-08-19T16:51:14.427,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/653 [ns_server:info,2014-08-19T16:51:14.428,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/653 [ns_server:debug,2014-08-19T16:51:14.431,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/659 [ns_server:info,2014-08-19T16:51:14.431,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/659 [ns_server:debug,2014-08-19T16:51:14.434,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/655 
[ns_server:info,2014-08-19T16:51:14.435,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/655 [ns_server:debug,2014-08-19T16:51:14.439,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/660 [ns_server:info,2014-08-19T16:51:14.440,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/660 [ns_server:debug,2014-08-19T16:51:14.443,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/652 [ns_server:info,2014-08-19T16:51:14.443,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/652 [ns_server:debug,2014-08-19T16:51:14.446,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/661 [ns_server:info,2014-08-19T16:51:14.447,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/661 [ns_server:debug,2014-08-19T16:51:14.450,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/654 [ns_server:info,2014-08-19T16:51:14.451,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/654 [ns_server:debug,2014-08-19T16:51:14.454,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/658 [ns_server:info,2014-08-19T16:51:14.455,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/658 [ns_server:debug,2014-08-19T16:51:14.457,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/657 [ns_server:info,2014-08-19T16:51:14.458,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/657 [ns_server:debug,2014-08-19T16:51:14.462,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/656 [ns_server:info,2014-08-19T16:51:14.463,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/656 [ns_server:debug,2014-08-19T16:51:14.467,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/906 [ns_server:info,2014-08-19T16:51:14.470,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/906 [ns_server:debug,2014-08-19T16:51:14.473,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/907 [ns_server:info,2014-08-19T16:51:14.474,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/907 [ns_server:debug,2014-08-19T16:51:14.478,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/908 [ns_server:info,2014-08-19T16:51:14.479,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/908 [ns_server:debug,2014-08-19T16:51:14.483,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/910 [ns_server:info,2014-08-19T16:51:14.484,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/910 
[ns_server:debug,2014-08-19T16:51:14.488,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/912 [ns_server:info,2014-08-19T16:51:14.489,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/912 [ns_server:debug,2014-08-19T16:51:14.493,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/915 [ns_server:info,2014-08-19T16:51:14.494,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/915 [ns_server:debug,2014-08-19T16:51:14.498,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/913 [ns_server:info,2014-08-19T16:51:14.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/913 [ns_server:debug,2014-08-19T16:51:14.503,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/911 [ns_server:info,2014-08-19T16:51:14.504,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/911 [ns_server:debug,2014-08-19T16:51:14.508,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/914 [ns_server:info,2014-08-19T16:51:14.509,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/914 [ns_server:debug,2014-08-19T16:51:14.513,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/387 [ns_server:info,2014-08-19T16:51:14.514,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/387 [ns_server:debug,2014-08-19T16:51:14.517,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/385 [ns_server:info,2014-08-19T16:51:14.518,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/385 [ns_server:debug,2014-08-19T16:51:14.522,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/395 [ns_server:info,2014-08-19T16:51:14.523,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/395 [ns_server:debug,2014-08-19T16:51:14.527,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/904 [ns_server:info,2014-08-19T16:51:14.528,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/904 [ns_server:debug,2014-08-19T16:51:14.532,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/899 [ns_server:info,2014-08-19T16:51:14.532,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/899 [ns_server:debug,2014-08-19T16:51:14.536,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/905 [ns_server:info,2014-08-19T16:51:14.537,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/905 [ns_server:debug,2014-08-19T16:51:14.541,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/390 
[ns_server:info,2014-08-19T16:51:14.542,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/390 [ns_server:debug,2014-08-19T16:51:14.546,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/392 [ns_server:info,2014-08-19T16:51:14.547,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/392 [ns_server:debug,2014-08-19T16:51:14.551,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/641 [ns_server:info,2014-08-19T16:51:14.552,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/641 [ns_server:debug,2014-08-19T16:51:14.555,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/645 [ns_server:info,2014-08-19T16:51:14.556,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/645 [ns_server:debug,2014-08-19T16:51:14.560,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/647 [ns_server:info,2014-08-19T16:51:14.561,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/647 [ns_server:debug,2014-08-19T16:51:14.565,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/643 [ns_server:info,2014-08-19T16:51:14.566,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/643 [ns_server:debug,2014-08-19T16:51:14.570,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/389 [ns_server:info,2014-08-19T16:51:14.571,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/389 [ns_server:debug,2014-08-19T16:51:14.575,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/902 [ns_server:info,2014-08-19T16:51:14.575,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/902 [ns_server:debug,2014-08-19T16:51:14.579,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/644 [ns_server:info,2014-08-19T16:51:14.579,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/644 [ns_server:debug,2014-08-19T16:51:14.584,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/388 [ns_server:info,2014-08-19T16:51:14.584,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/388 [ns_server:debug,2014-08-19T16:51:14.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/909 [ns_server:info,2014-08-19T16:51:14.589,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/909 [ns_server:debug,2014-08-19T16:51:14.593,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/900 [ns_server:info,2014-08-19T16:51:14.594,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/900 
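Each notification/deletion pair also carries two timestamps, so the same kind of helper can estimate how long each per-vbucket deletion took (in this section, a few milliseconds per vbucket). Again a sketch that assumes the entry format shown above and is not an ns_server utility.

import re
from datetime import datetime

# One regex for both lines of a pair: timestamp, message kind, and vbucket id.
ENTRY_RE = re.compile(
    r"\[ns_server:(?:debug|info),(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d{3}),[^\]]*\]"
    r"(?P<msg>Notifying mc_couch_events of vbucket deletion|Deleting vbucket): default/(?P<vb>\d+)"
)

def deletion_latencies(log_text):
    """Pair each 'Notifying ...' entry with the matching 'Deleting vbucket' entry for the
    same vbucket id and return the elapsed milliseconds per vbucket."""
    pending = {}    # vbucket id -> timestamp of the notification entry
    latencies = {}  # vbucket id -> milliseconds until the deletion entry
    for match in ENTRY_RE.finditer(log_text):
        stamp = datetime.strptime(match.group("ts"), "%Y-%m-%dT%H:%M:%S.%f")
        vb = int(match.group("vb"))
        if match.group("msg").startswith("Notifying"):
            pending[vb] = stamp
        elif vb in pending:
            latencies[vb] = (stamp - pending.pop(vb)).total_seconds() * 1000.0
    return latencies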
[ns_server:debug,2014-08-19T16:51:14.599,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/386 [ns_server:info,2014-08-19T16:51:14.600,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/386 [ns_server:debug,2014-08-19T16:51:14.604,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/394 [ns_server:info,2014-08-19T16:51:14.605,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/394 [ns_server:debug,2014-08-19T16:51:14.608,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/896 [ns_server:info,2014-08-19T16:51:14.609,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/896 [ns_server:debug,2014-08-19T16:51:14.613,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/897 [ns_server:info,2014-08-19T16:51:14.614,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/897 [ns_server:debug,2014-08-19T16:51:14.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/640 [ns_server:info,2014-08-19T16:51:14.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/640 [ns_server:debug,2014-08-19T16:51:14.623,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/649 [ns_server:info,2014-08-19T16:51:14.624,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/649 [ns_server:debug,2014-08-19T16:51:14.630,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/384 [ns_server:info,2014-08-19T16:51:14.631,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/384 [ns_server:debug,2014-08-19T16:51:14.635,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/651 [ns_server:info,2014-08-19T16:51:14.636,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/651 [ns_server:debug,2014-08-19T16:51:14.640,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/397 [ns_server:info,2014-08-19T16:51:14.641,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/397 [ns_server:debug,2014-08-19T16:51:14.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/646 [ns_server:info,2014-08-19T16:51:14.645,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/646 [ns_server:debug,2014-08-19T16:51:14.649,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/898 [ns_server:info,2014-08-19T16:51:14.650,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/898 [ns_server:debug,2014-08-19T16:51:14.654,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/391 
[ns_server:info,2014-08-19T16:51:14.655,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/391 [ns_server:debug,2014-08-19T16:51:14.659,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/648 [ns_server:info,2014-08-19T16:51:14.659,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/648 [ns_server:debug,2014-08-19T16:51:14.663,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/642 [ns_server:info,2014-08-19T16:51:14.664,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/642 [ns_server:debug,2014-08-19T16:51:14.667,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/393 [ns_server:info,2014-08-19T16:51:14.668,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/393 [ns_server:debug,2014-08-19T16:51:14.672,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/901 [ns_server:info,2014-08-19T16:51:14.673,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/901 [ns_server:debug,2014-08-19T16:51:14.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/650 [ns_server:info,2014-08-19T16:51:14.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/650 [ns_server:debug,2014-08-19T16:51:14.681,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:144]Notifying mc_couch_events of vbucket deletion: default/903 [ns_server:info,2014-08-19T16:51:14.682,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_delete_vbucket:149]Deleting vbucket: default/903 [ns_server:debug,2014-08-19T16:51:14.857,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 106. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.858,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/106. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:14.858,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",106,active,0} [ns_server:debug,2014-08-19T16:51:14.859,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,830,519,336,25,153,881, 570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829,518, 335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777,594, 283,228,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853,542,359, 176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618,307,252, 124,852,541,47,358,175,775,592,281,226,826,515,332,21,149,877,566,383,200, 800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331,148,876, 71,565,382,199,799,616,305,250,122,850,539,45,356,173,773,590,279,224,824, 513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172,95,772, 589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120,848,537, 43,354,171,771,588,277,222,822,639,328,17,145,873,562,379,196,796,613,302, 247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67,561,378, 195,795,612,301,246,118,846,535,41,352,169,769,586,275,220,820,637,326,15, 143,871,807,624,560,377,313,194,130,858,794,611,547,53,364,300,245,181,117, 845,781,598,534,351,287,232,168,91,832,768,585,521,338,274,27,219,155,883, 819,636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857,793,610, 546,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,895,831,584, 520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558,375,192, 792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323,140,868, 63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271,216,816, 633,322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164,892,87, 581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529, 35,346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239, 111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187, 787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135,863, 552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576, 265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341, 158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782, 599,288,233,105,833,522,339,156,884,79,573,262,207] [views:debug,2014-08-19T16:51:14.917,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/106. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:14.917,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",106,active,0} [ns_server:debug,2014-08-19T16:51:14.992,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 104. Nacking mccouch update. [views:debug,2014-08-19T16:51:14.992,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/104. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:14.992,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",104,active,0} [ns_server:debug,2014-08-19T16:51:14.993,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,830,519,336,25,153,881, 570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829,518, 335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777,594, 283,228,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853,542,359, 176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618,307,252, 124,852,541,47,358,175,775,592,281,226,826,515,332,21,149,877,566,383,200, 800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331,148,876, 71,565,382,199,799,616,305,250,122,850,539,45,356,173,773,590,279,224,824, 513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172,95,772, 589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120,848,537, 43,354,171,771,588,277,222,822,639,328,17,145,873,562,379,196,796,613,302, 247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67,561,378, 195,795,612,301,246,118,846,535,41,352,169,769,586,275,220,820,637,326,15, 143,871,807,624,560,377,313,194,130,858,794,611,547,53,364,300,245,181,117, 845,781,598,534,351,287,232,168,104,91,832,768,585,521,338,274,27,219,155, 883,819,636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857,793, 610,546,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,895,831, 584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558,375, 192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323,140, 868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271,216, 816,633,322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164,892, 87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840, 529,35,346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605,294, 239,111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553,370, 187,787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135, 863,552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811, 628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887, 576,265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524, 341,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289, 234,106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182, 782,599,288,233,105,833,522,339,156,884,79,573,262,207] [views:debug,2014-08-19T16:51:15.026,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/104. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.027,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",104,active,0} [ns_server:debug,2014-08-19T16:51:15.101,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 102. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.101,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/102. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.101,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",102,active,0} [ns_server:debug,2014-08-19T16:51:15.102,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853,542, 359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618,307, 252,124,852,541,47,358,175,775,592,281,226,826,515,332,21,149,877,566,383, 200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331,148, 876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,773,590,279,224, 824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172,95, 772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120,848, 537,43,354,171,771,588,277,222,822,639,328,17,145,873,562,379,196,796,613, 302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67,561, 378,195,795,612,301,246,118,846,535,41,352,169,769,586,275,220,820,637,326, 15,143,871,807,624,560,377,313,194,130,858,794,611,547,53,364,300,245,181, 117,845,781,598,534,351,287,232,168,104,91,832,768,585,521,338,274,27,219, 155,883,819,636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857, 793,610,546,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,895, 831,584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558, 375,192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323, 140,868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271, 216,816,633,322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164, 892,87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112, 840,529,35,346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605, 294,239,111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553, 370,187,787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318, 135,863,552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211, 811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159, 887,576,265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835, 524,341,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600, 289,234,106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365, 182,782,599,288,233,105,833,522,339,156,884,79,573,262,207] [views:debug,2014-08-19T16:51:15.135,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/102. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.136,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",102,active,0} [ns_server:debug,2014-08-19T16:51:15.210,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 100. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.210,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/100. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.210,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",100,active,0} [ns_server:debug,2014-08-19T16:51:15.212,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,769,586,275,220,820,637, 326,15,143,871,807,624,560,377,313,194,130,858,794,611,547,53,364,300,245, 181,117,845,781,598,534,351,287,232,168,104,91,832,768,585,521,338,274,27, 219,155,883,819,636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1, 857,793,610,546,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103, 895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869, 558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634, 323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,582, 271,216,816,633,322,139,11,867,556,373,190,790,607,296,241,113,841,530,347, 164,892,87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240, 112,840,529,35,346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788, 605,294,239,111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59, 553,370,187,787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629, 318,135,863,552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266, 211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31, 159,887,576,265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107, 835,524,341,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783, 600,289,234,106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548, 365,182,782,599,288,233,105,833,522,339,156,884,79,573,262,207] [views:debug,2014-08-19T16:51:15.244,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/100. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.244,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",100,active,0} [ns_server:debug,2014-08-19T16:51:15.383,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 98. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.383,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/98. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.383,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",98,active,0} [ns_server:debug,2014-08-19T16:51:15.384,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,769,586,275,220,820,637, 326,15,143,871,560,377,194,858,794,611,547,53,364,300,245,181,117,845,781, 598,534,351,287,232,168,104,91,832,768,585,521,338,274,27,219,155,883,819, 636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546, 363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792, 609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271,216,816,633, 322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581, 270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35, 346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239, 111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187, 787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135,863, 552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576, 265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341, 158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782, 599,288,233,105,833,522,339,156,884,79,573,262,207,807,624,313,130] [views:debug,2014-08-19T16:51:15.442,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/98. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.442,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",98,active,0} [ns_server:debug,2014-08-19T16:51:15.617,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 96. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.617,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/96. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.617,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",96,active,0} [ns_server:debug,2014-08-19T16:51:15.618,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,769,586,275,220,820,637, 326,15,143,871,560,377,194,858,794,611,547,53,364,300,245,181,117,845,781, 598,534,351,287,232,168,104,91,832,768,585,521,338,274,27,219,155,883,819, 636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546, 363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792, 609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271,216,816,633, 322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581, 270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35, 346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239, 111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187, 787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135,863, 552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576, 265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341, 158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782, 599,288,233,105,833,522,339,156,884,79,573,262,207,807,624,313,130] [views:debug,2014-08-19T16:51:15.676,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/96. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",96,active,0} [ns_server:debug,2014-08-19T16:51:15.851,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 94. Nacking mccouch update. [views:debug,2014-08-19T16:51:15.851,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/94. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.851,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",94,active,0} [ns_server:debug,2014-08-19T16:51:15.852,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,769,586,275,220,820,637, 326,15,143,871,560,377,194,858,794,611,547,53,364,300,245,181,117,845,781, 598,534,351,287,232,168,104,91,832,768,585,521,338,274,27,219,155,883,819, 636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546, 363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792, 609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271,216,816,633, 322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581, 270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35, 346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239, 111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187, 787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135,863, 552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576, 265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341, 158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782, 599,288,233,105,833,522,339,156,884,79,573,262,207,807,624,313,130] [views:debug,2014-08-19T16:51:15.935,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/94. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:15.935,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",94,active,0} [ns_server:debug,2014-08-19T16:51:16.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 92. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.110,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/92. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.110,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",92,active,0} [ns_server:debug,2014-08-19T16:51:16.111,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,858,794,611,547,53,364,300,245,181,117,845, 781,598,534,351,287,232,168,104,91,832,768,585,521,338,274,27,219,155,883, 819,636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857,793,610, 546,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,895,831,584, 520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558,375,192, 792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323,140,868, 63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271,216,816, 633,322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164,892,87, 581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529, 35,346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239, 111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187, 787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135,863, 552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576, 265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341, 158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782, 599,288,233,105,833,522,339,156,884,79,573,262,207,807,624,313,130] [views:debug,2014-08-19T16:51:16.194,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/92. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.194,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",92,active,0} [ns_server:debug,2014-08-19T16:51:16.369,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 90. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.369,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/90. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.369,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",90,active,0} [ns_server:debug,2014-08-19T16:51:16.371,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,858,794,611,547,53,364,300,245,181,117,845, 781,598,534,351,287,232,168,104,91,832,768,585,521,338,274,27,219,155,883, 819,636,572,325,261,206,142,870,806,65,623,559,376,312,193,129,1,857,793,610, 546,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,90,895,831, 584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,558,375, 192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634,323,140, 868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,582,271,216, 816,633,322,139,11,867,556,373,190,790,607,296,241,113,841,530,347,164,892, 87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840, 529,35,346,163,891,580,269,214,9,814,631,320,137,865,554,371,188,788,605,294, 239,111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553,370, 187,787,604,293,238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135, 863,552,369,186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811, 628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887, 576,265,210,810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524, 341,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289, 234,106,834,523,340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182, 782,599,288,233,105,833,522,339,156,884,79,573,262,207,807,624,313,130] [views:debug,2014-08-19T16:51:16.428,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/90. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.428,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",90,active,0} [ns_server:debug,2014-08-19T16:51:16.536,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 88. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.536,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/88. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.536,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",88,active,0} [ns_server:debug,2014-08-19T16:51:16.538,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,794,611,300,245,117,845,781,598,534,351,287, 232,168,104,91,832,768,585,521,338,274,27,219,155,883,819,636,572,325,261, 206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180, 116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218, 154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792,609,298,243, 115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191, 791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11, 867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891, 580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239,111,839,528, 345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135,863,552,369,186, 786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862, 57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576,265,210,810, 627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575, 264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340, 29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105, 833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181] [views:debug,2014-08-19T16:51:16.587,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/88. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.587,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",88,active,0} [ns_server:debug,2014-08-19T16:51:16.678,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 86. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.679,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/86. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.679,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",86,active,0} [ns_server:debug,2014-08-19T16:51:16.680,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,794,611,300,245,117,845,781,598,534,351,287, 232,168,104,91,832,768,585,521,338,274,27,219,155,883,819,636,572,325,261, 206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180, 116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218, 154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792,609,298,243, 115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191, 791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11, 867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239,111,839,528, 345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,578,267,212,812,7,629,318,135,863,552,369,186, 786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862, 57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576,265,210,810, 627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575, 264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340, 29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105, 833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181] [views:debug,2014-08-19T16:51:16.729,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/86. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.730,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",86,active,0} [ns_server:debug,2014-08-19T16:51:16.821,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 84. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.821,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/84. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.821,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",84,active,0} [ns_server:debug,2014-08-19T16:51:16.823,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,794,611,300,245,117,845,781,598,534,351,287, 232,168,104,91,832,768,585,521,338,274,27,219,155,883,819,636,572,325,261, 206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180, 116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218, 154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792,609,298,243, 115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191, 791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11, 867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239,111,839,528, 345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369, 186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,576,265,210, 810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81, 575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523, 340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233, 105,833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181] [views:debug,2014-08-19T16:51:16.871,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/84. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.872,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",84,active,0} [ns_server:debug,2014-08-19T16:51:16.947,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 82. Nacking mccouch update. [views:debug,2014-08-19T16:51:16.947,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/82. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.947,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",82,active,0} [ns_server:debug,2014-08-19T16:51:16.948,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,794,611,300,245,117,845,781,598,534,351,287, 232,168,104,91,832,768,585,521,338,274,27,219,155,883,819,636,572,325,261, 206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180, 116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218, 154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792,609,298,243, 115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191, 791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11, 867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239,111,839,528, 345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369, 186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210, 810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81, 575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523, 340,29,157,885,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233, 105,833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181] [views:debug,2014-08-19T16:51:16.981,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/82. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:16.981,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",82,active,0} [ns_server:debug,2014-08-19T16:51:17.094,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 80. Nacking mccouch update. [views:debug,2014-08-19T16:51:17.094,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/80. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.095,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",80,active,0} [ns_server:debug,2014-08-19T16:51:17.096,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,794,611,300,245,117,845,781,598,534,351,287, 232,168,104,91,832,768,585,521,338,274,27,219,155,883,819,636,572,325,261, 206,142,870,806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180, 116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218, 154,882,818,77,635,571,324,260,205,141,13,869,558,375,192,792,609,298,243, 115,843,532,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191, 791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11, 867,556,373,190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,554,371,188,788,605,294,239,111,839,528, 345,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369, 186,786,603,292,237,109,837,526,343,160,888,83,577,266,211,811,628,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210, 810,627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81, 575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523, 340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288, 233,105,833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364, 181] [views:debug,2014-08-19T16:51:17.153,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/80. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.153,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",80,active,0} [rebalance:info,2014-08-19T16:51:17.300,ns_1@10.242.238.88:<0.18664.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 380 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:17.300,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 380 state to active [rebalance:info,2014-08-19T16:51:17.304,ns_1@10.242.238.88:<0.18664.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 380 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:17.305,ns_1@10.242.238.88:<0.18664.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:17.328,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 78. Nacking mccouch update. [views:debug,2014-08-19T16:51:17.329,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/78. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.329,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",78,active,0} [ns_server:debug,2014-08-19T16:51:17.330,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101,829, 518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177,777, 594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125,853, 542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801,618, 307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877,566, 383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514,331, 148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590,279, 224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355,172, 95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248,120, 848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196,796, 613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872,67, 561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,794,611,300,245,117,845,534,351,168,91,832, 768,585,521,338,274,27,219,155,883,819,78,636,572,325,261,206,142,870,806,65, 623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780,597, 533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635, 571,324,260,205,141,13,869,558,375,192,792,609,298,243,115,843,532,349,166, 894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556,373,190,790, 607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61, 555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814, 631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579, 268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344, 33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292,237, 
109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185, 785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133, 861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209,809, 626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80, 574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833,522, 339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598,287, 232,104] [views:debug,2014-08-19T16:51:17.413,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/78. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.413,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",78,active,0} [ns_server:debug,2014-08-19T16:51:17.588,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 76. Nacking mccouch update. [views:debug,2014-08-19T16:51:17.588,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/76. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.588,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",76,active,0} [ns_server:debug,2014-08-19T16:51:17.589,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,568,257,202,802,619,308,253,125, 853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201,801, 618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149,877, 566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825,514, 331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773,590, 279,224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538,355, 172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303,248, 120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379,196, 796,613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144,872, 67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220,820, 637,326,15,143,871,560,377,194,794,611,300,245,117,845,534,351,168,91,832, 768,585,521,338,274,27,219,155,883,819,78,636,572,325,261,206,142,870,806,65, 623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780,597, 533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635, 571,324,260,205,141,13,869,558,375,192,792,609,298,243,115,843,532,349,166, 894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556,373,190,790, 607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61, 555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814, 631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579, 268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344, 33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292,237, 109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185, 
785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133, 861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209,809, 626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80, 574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833,522, 339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598,287, 232,104] [views:debug,2014-08-19T16:51:17.672,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/76. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.672,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",76,active,0} [ns_server:debug,2014-08-19T16:51:17.847,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 74. Nacking mccouch update. [views:debug,2014-08-19T16:51:17.847,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/74. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.847,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",74,active,0} [ns_server:debug,2014-08-19T16:51:17.848,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225,825, 514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96,773, 590,279,224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849,538, 355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614,303, 248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562,379, 196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327,144, 872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275,220, 820,637,326,15,143,871,560,377,194,794,611,300,245,117,845,534,351,168,91, 832,768,585,521,338,274,27,219,155,883,819,78,636,572,325,261,206,142,870, 806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780, 597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77, 635,571,324,260,205,141,13,869,558,375,192,792,609,298,243,115,843,532,349, 166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242, 114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556,373,190, 790,607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866, 61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9, 814,631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85, 579,268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527, 344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292, 237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368, 185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316, 
133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209, 809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157, 885,80,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833, 522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598, 287,232,104] [views:debug,2014-08-19T16:51:17.931,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/74. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:17.931,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",74,active,0} [ns_server:debug,2014-08-19T16:51:17.950,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_874_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_874_'ns_1@10.242.238.89'">>}]}, {move_state,363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_363_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_363_'ns_1@10.242.238.90'">>}]}, {move_state,619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_619_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_619_'ns_1@10.242.238.89'">>}]}, {move_state,875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_875_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_875_'ns_1@10.242.238.89'">>}]}, {move_state,364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_364_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_364_'ns_1@10.242.238.90'">>}]}, {move_state,620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_620_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_620_'ns_1@10.242.238.89'">>}]}, {move_state,876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_876_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_876_'ns_1@10.242.238.89'">>}]}, {move_state,365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_365_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_365_'ns_1@10.242.238.90'">>}]}, {move_state,621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_621_'ns_1@10.242.238.90'">>}, 
{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_621_'ns_1@10.242.238.89'">>}]}, {move_state,877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_877_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_877_'ns_1@10.242.238.89'">>}]}, {move_state,366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_366_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_366_'ns_1@10.242.238.90'">>}]}, {move_state,622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_622_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_622_'ns_1@10.242.238.89'">>}]}, {move_state,878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_878_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_878_'ns_1@10.242.238.89'">>}]}, {move_state,367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_367_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_367_'ns_1@10.242.238.90'">>}]}, {move_state,623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_623_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_623_'ns_1@10.242.238.89'">>}]}, {move_state,879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_879_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_879_'ns_1@10.242.238.89'">>}]}, {move_state,368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_368_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_368_'ns_1@10.242.238.90'">>}]}, {move_state,624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_624_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_624_'ns_1@10.242.238.89'">>}]}, {move_state,880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_880_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_880_'ns_1@10.242.238.89'">>}]}, {move_state,369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_369_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_369_'ns_1@10.242.238.90'">>}]}, {move_state,625, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_625_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_625_'ns_1@10.242.238.89'">>}]}, {move_state,881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_881_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_881_'ns_1@10.242.238.89'">>}]}, {move_state,370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_370_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_370_'ns_1@10.242.238.90'">>}]}, {move_state,626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_626_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_626_'ns_1@10.242.238.89'">>}]}, {move_state,882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_882_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_882_'ns_1@10.242.238.89'">>}]}, {move_state,371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_371_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_371_'ns_1@10.242.238.90'">>}]}, {move_state,627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_627_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_627_'ns_1@10.242.238.89'">>}]}, {move_state,883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_883_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_883_'ns_1@10.242.238.89'">>}]}, {move_state,372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_372_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_372_'ns_1@10.242.238.90'">>}]}, {move_state,628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_628_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_628_'ns_1@10.242.238.89'">>}]}, {move_state,884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_884_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_884_'ns_1@10.242.238.89'">>}]}, {move_state,373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_373_'ns_1@10.242.238.89'">>}, 
{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_373_'ns_1@10.242.238.90'">>}]}, {move_state,629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_629_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_629_'ns_1@10.242.238.89'">>}]}, {move_state,885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_885_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_885_'ns_1@10.242.238.89'">>}]}, {move_state,374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_374_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_374_'ns_1@10.242.238.90'">>}]}, {move_state,630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_630_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_630_'ns_1@10.242.238.89'">>}]}, {move_state,886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_886_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_886_'ns_1@10.242.238.89'">>}]}, {move_state,375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_375_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_375_'ns_1@10.242.238.90'">>}]}, {move_state,631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_631_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_631_'ns_1@10.242.238.89'">>}]}, {move_state,887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_887_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_887_'ns_1@10.242.238.89'">>}]}, {move_state,376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_376_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_376_'ns_1@10.242.238.90'">>}]}, {move_state,632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_632_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_632_'ns_1@10.242.238.89'">>}]}, {move_state,888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_888_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_888_'ns_1@10.242.238.89'">>}]}, {move_state,377, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_377_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_377_'ns_1@10.242.238.90'">>}]}, {move_state,633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_633_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_633_'ns_1@10.242.238.89'">>}]}, {move_state,889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_889_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_889_'ns_1@10.242.238.89'">>}]}, {move_state,378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_378_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_378_'ns_1@10.242.238.90'">>}]}, {move_state,634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_634_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_634_'ns_1@10.242.238.89'">>}]}, {move_state,890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_890_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_890_'ns_1@10.242.238.89'">>}]}, {move_state,379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_379_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_379_'ns_1@10.242.238.90'">>}]}, {move_state,635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_635_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_635_'ns_1@10.242.238.89'">>}]}, {move_state,891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_891_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_891_'ns_1@10.242.238.89'">>}]}, {move_state,380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_380_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_380_'ns_1@10.242.238.90'">>}]}, {move_state,636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_636_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_636_'ns_1@10.242.238.89'">>}]}, {move_state,892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_892_'ns_1@10.242.238.91'">>}, 
{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_892_'ns_1@10.242.238.89'">>}]}, {move_state,381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_381_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_381_'ns_1@10.242.238.90'">>}]}, {move_state,637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_637_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_637_'ns_1@10.242.238.89'">>}]}, {move_state,893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_893_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_893_'ns_1@10.242.238.89'">>}]}, {move_state,382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_382_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_382_'ns_1@10.242.238.90'">>}]}, {move_state,638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_638_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_638_'ns_1@10.242.238.89'">>}]}, {move_state,894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_894_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_894_'ns_1@10.242.238.89'">>}]}, {move_state,383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_383_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_383_'ns_1@10.242.238.90'">>}]}, {move_state,895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_895_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_895_'ns_1@10.242.238.89'">>}]}, {move_state,639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_639_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_639_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:51:17.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 874, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 363, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 619, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 
875, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 364, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 620, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 876, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 365, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 621, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 877, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 366, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 622, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 878, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 367, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 623, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 879, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 368, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 624, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 880, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 369, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 625, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 881, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 370, 
[{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 626, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 882, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 371, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 627, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 883, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 372, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 628, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 884, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 373, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 629, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 885, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 374, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 630, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 886, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 375, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 631, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 887, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 376, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 632, 
[{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 888, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 377, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 633, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.985,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 889, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 378, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.986,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 634, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 890, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.987,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 379, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 635, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.988,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 891, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 380, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.989,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 636, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 892, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.990,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 381, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 637, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.991,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 893, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.992,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 382, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.992,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 638, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.993,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 894, 
[{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.993,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 383, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:17.994,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 895, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:17.994,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 639, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:18.106,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 72. Nacking mccouch update. [views:debug,2014-08-19T16:51:18.106,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/72. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.106,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",72,active,0} [ns_server:debug,2014-08-19T16:51:18.107,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,564,381,198,798,615,304,249,121,849, 538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797,614, 303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873,562, 379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821,638,327, 144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586,275, 220,820,637,326,15,143,871,560,377,194,794,611,300,245,117,845,534,351,168, 91,832,768,585,521,338,274,27,219,155,883,819,78,636,572,325,261,206,142,870, 806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780, 597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77, 635,571,324,260,205,141,13,869,558,375,192,792,609,298,243,115,843,532,349, 166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242, 114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556,373,190, 790,607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866, 61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9, 814,631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85, 579,268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527, 344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292, 237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368, 185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316, 133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209, 809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157, 
885,80,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833, 522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598, 287,232,104] [views:debug,2014-08-19T16:51:18.165,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/72. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.165,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",72,active,0} [ns_server:debug,2014-08-19T16:51:18.340,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 70. Nacking mccouch update. [views:debug,2014-08-19T16:51:18.340,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/70. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.340,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",70,active,0} [ns_server:debug,2014-08-19T16:51:18.341,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821,638, 327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769,586, 275,220,820,637,326,15,143,871,560,377,194,794,611,300,245,117,845,534,351, 168,91,832,768,585,521,338,274,27,219,155,883,819,78,636,572,325,261,206,142, 870,806,65,623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844, 780,597,533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882, 818,77,635,571,324,260,205,141,13,869,558,375,192,792,609,298,243,115,843, 532,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608, 297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556, 373,190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321, 138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269, 214,9,814,631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162, 890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110, 838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786, 603,292,237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57, 551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810, 627,5,316,133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575, 264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340, 29,157,885,80,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233, 
105,833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181, 781,598,287,232,104] [views:debug,2014-08-19T16:51:18.407,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/70. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.408,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",70,active,0} [ns_server:debug,2014-08-19T16:51:18.483,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 68. Nacking mccouch update. [views:debug,2014-08-19T16:51:18.483,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/68. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.484,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",68,active,0} [ns_server:debug,2014-08-19T16:51:18.485,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,560,377,194,794,611,300,245,117,845,534, 351,168,91,768,585,274,219,883,819,78,636,572,325,261,206,142,870,806,65,623, 559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780,597,533,39, 350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635,571, 324,260,205,141,13,869,558,375,192,792,609,298,243,115,843,532,349,166,894, 89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242,114,842, 531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556,373,190,790,607, 296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61,555, 372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814,631, 320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579,268, 213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344,33, 161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292,237,109, 837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185,785, 602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861, 550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209,809,626, 315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80,574, 263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833,522,339, 156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232, 
104,832,521,338,27,155] [views:debug,2014-08-19T16:51:18.517,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/68. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.518,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",68,active,0} [ns_server:debug,2014-08-19T16:51:18.617,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 66. Nacking mccouch update. [views:debug,2014-08-19T16:51:18.618,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/66. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.618,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",66,active,0} [ns_server:debug,2014-08-19T16:51:18.619,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,883,819,78,636,572,325,261,206,142,870,806,65, 623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780,597, 533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635, 571,324,260,205,141,13,869,558,375,192,792,609,298,243,115,843,532,349,166, 894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556,373,190,790, 607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61, 555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814, 631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579, 268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344, 33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292,237, 109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185, 785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133, 861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209,809, 626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80, 574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833,522, 339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598,287, 232,104,832,521,338,27,155] 
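The move_state tuples dumped by docs_left_updater_loop above, and the per-move update_stats casts that follow it, share one shape: a vbucket id, its old and new replication chains, and one replica_building_stats entry per destination node. A minimal sketch of reading that shape, using hypothetical record definitions that only mirror the tuples as logged (the real ns_server definitions may differ):

-module(move_state_sketch).
-export([docs_left/1]).

%% Hypothetical records mirroring the tuples printed in the log above.
-record(replica_building_stats,
        {node,             %% e.g. 'ns_1@10.242.238.91'
         docs_left = 0,
         docs_total = 0,
         tap_name}).       %% e.g. <<"replication_building_874_...">>
-record(move_state,
        {vbucket,          %% e.g. 874
         before_chain,     %% ['ns_1@10.242.238.88', undefined]
         after_chain,      %% ['ns_1@10.242.238.91', 'ns_1@10.242.238.89']
         stats = []}).     %% list of #replica_building_stats{}

%% Total documents still to be replicated for one in-flight vbucket move;
%% in the dump above every counter is 0, consistent with the moves having
%% just been scheduled.
docs_left(#move_state{stats = Stats}) ->
    lists:sum([S#replica_building_stats.docs_left || S <- Stats]).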
[views:debug,2014-08-19T16:51:18.676,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/66. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.677,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",66,active,0} [ns_server:debug,2014-08-19T16:51:18.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 64. Nacking mccouch update. [views:debug,2014-08-19T16:51:18.777,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/64. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",64,active,0} [ns_server:debug,2014-08-19T16:51:18.778,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,883,819,78,636,572,325,261,206,142,870,806,65, 623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780,597, 533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635, 571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115,843,532,349, 166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242, 114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,556,373,190, 790,607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866, 61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9, 814,631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85, 579,268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527, 344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292, 237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368, 185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316, 133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209, 809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157, 885,80,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833, 522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598, 287,232,104,832,521,338,27,155] 
[views:debug,2014-08-19T16:51:18.836,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/64. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.836,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",64,active,0} [ns_server:debug,2014-08-19T16:51:18.973,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 62. Nacking mccouch update. [views:debug,2014-08-19T16:51:18.973,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/62. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:18.973,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",62,active,0} [ns_server:debug,2014-08-19T16:51:18.975,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,883,819,78,636,572,325,261,206,142,870,806,65, 623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780,597, 533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635, 571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115,843,532,349, 166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242, 114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373, 190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138, 866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214, 9,814,631,320,137,865,554,371,188,788,605,294,239,111,839,528,345,162,890,85, 579,268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527, 344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603,292, 237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368, 185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316, 133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209, 809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157, 885,80,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833, 522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598, 287,232,104,832,521,338,27,155] 
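Each vbucket in this stretch produces the same group of entries: mc_connection adds the _local/vbuuid document and nacks the mccouch update, capi_set_view_manager picks up the set_vbucket event and recomputes its usable-vbuckets set, and mc_connection then logs the signaled mc_couch_event. The interleaving (the handler's "Got set_vbucket event" line appearing before the caller's "Signaled mc_couch_event" line) is consistent with a synchronous notification. A small sketch of that sequence, with hypothetical module and helper names that follow the log text rather than the actual ns_server code:

-module(mc_notify_sketch).
-export([notify_vbucket_update/4]).

%% Placeholder for persisting the _local/vbuuid document into the vbucket
%% database; purely illustrative.
write_local_vbuuid(_Bucket, _VBucket) ->
    ok.

%% Mirrors the logged order: write the vbuuid doc first, then raise the event
%% that yields the "Got set_vbucket event ..." line, e.g.
%%   notify_vbucket_update("default", 72, active, 0).
notify_vbucket_update(Bucket, VBucket, State, Checkpoint) ->
    ok = write_local_vbuuid(Bucket, VBucket),
    gen_event:sync_notify(mc_couch_events,
                          {set_vbucket, Bucket, VBucket, State, Checkpoint}).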
[views:debug,2014-08-19T16:51:19.032,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/62. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.032,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",62,active,0} [rebalance:info,2014-08-19T16:51:19.079,ns_1@10.242.238.88:<0.18587.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 381 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:19.079,ns_1@10.242.238.88:<0.18524.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 382 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:19.081,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 381 state to active [rebalance:info,2014-08-19T16:51:19.082,ns_1@10.242.238.88:<0.18587.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 381 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:19.082,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 382 state to active [rebalance:info,2014-08-19T16:51:19.083,ns_1@10.242.238.88:<0.18524.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 382 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:19.083,ns_1@10.242.238.88:<0.18587.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:19.084,ns_1@10.242.238.88:<0.18524.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:19.156,ns_1@10.242.238.88:<0.20999.0>:ns_orchestrator:handle_info:428]Skipping janitor in state rebalancing [ns_server:debug,2014-08-19T16:51:19.200,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 60. Nacking mccouch update. [views:debug,2014-08-19T16:51:19.200,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/60. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.201,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",60,active,0} [ns_server:debug,2014-08-19T16:51:19.202,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,883,819,78,636,572,325,261,206,142,870,806,65, 623,559,376,312,193,129,1,857,793,610,546,363,299,244,180,116,844,780,597, 533,39,350,286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635, 571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115,843,532,349, 166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242, 114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373, 190,790,607,296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138, 866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214, 9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528,345,162,890, 85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838, 527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,552,369,186,786,603, 292,237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551, 368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5, 316,133,861,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264, 209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29, 157,885,80,574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105, 833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781, 598,287,232,104,832,521,338,27,155] [views:debug,2014-08-19T16:51:19.251,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/60. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.251,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",60,active,0} [ns_server:debug,2014-08-19T16:51:19.392,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 58. Nacking mccouch update. [views:debug,2014-08-19T16:51:19.393,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/58. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.393,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",58,active,0} [ns_server:debug,2014-08-19T16:51:19.394,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,819,636,325,142,870,806,65,623,559,376,312, 193,129,1,857,793,610,546,363,299,244,180,116,844,780,597,533,39,350,286,231, 167,103,90,895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205, 141,13,869,64,558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272, 217,817,634,323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348, 165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241, 113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189, 789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137, 865,60,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213, 813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161, 889,84,578,267,212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109, 837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185,785, 602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861, 550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209,809,626, 315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80,574, 263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833,522,339, 156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232, 104,832,521,338,27,155,883,78,572,261,206] [views:debug,2014-08-19T16:51:19.443,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/58. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.443,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",58,active,0} [ns_server:debug,2014-08-19T16:51:19.593,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 56. Nacking mccouch update. [views:debug,2014-08-19T16:51:19.593,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/56. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.594,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",56,active,0} [ns_server:debug,2014-08-19T16:51:19.595,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,819,636,325,142,870,806,65,623,559,376,312, 193,129,1,857,793,610,546,363,299,244,180,116,844,780,597,533,39,350,286,231, 167,103,90,895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205, 141,13,869,64,558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272, 217,817,634,323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348, 165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241, 113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189, 789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137, 865,60,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213, 813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161, 889,84,578,267,212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109, 837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185,785, 602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861, 56,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209,809, 626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80, 574,263,208,808,625,314,3,131,859,548,365,182,782,599,288,233,105,833,522, 339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598,287, 232,104,832,521,338,27,155,883,78,572,261,206] [views:debug,2014-08-19T16:51:19.661,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/56. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.661,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",56,active,0} [ns_server:debug,2014-08-19T16:51:19.827,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 54. Nacking mccouch update. [views:debug,2014-08-19T16:51:19.827,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/54. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.828,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",54,active,0} [ns_server:debug,2014-08-19T16:51:19.829,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,819,636,325,142,870,806,65,623,559,376,312, 193,129,1,857,793,610,546,363,299,244,180,116,844,780,597,533,39,350,286,231, 167,103,90,895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205, 141,13,869,64,558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272, 217,817,634,323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348, 165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241, 113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189, 789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137, 865,60,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213, 813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161, 889,84,578,267,212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109, 837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185,785, 602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861, 56,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209,809, 626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80, 574,263,208,808,625,314,3,131,859,548,54,365,182,782,599,288,233,105,833,522, 339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781,598,287, 232,104,832,521,338,27,155,883,78,572,261,206] [views:debug,2014-08-19T16:51:19.878,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/54. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.878,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",54,active,0} [ns_server:debug,2014-08-19T16:51:19.953,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 52. Nacking mccouch update. [views:debug,2014-08-19T16:51:19.953,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/52. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.953,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",52,active,0} [ns_server:debug,2014-08-19T16:51:19.955,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,361,178,778,595,284,229,101, 829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360,177, 777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308,253, 125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256,201, 801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21,149, 877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280,225, 825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356,173,96, 773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249,121, 849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197,797, 614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145,873, 68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221,821, 638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92,769, 586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117,845, 534,351,168,91,768,585,274,219,819,636,325,142,870,806,65,623,559,376,312, 193,129,1,857,793,610,546,52,363,299,244,180,116,844,780,597,533,39,350,286, 231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635,571,324,260, 205,141,13,869,64,558,375,192,792,609,298,243,115,843,532,349,166,894,89,583, 272,217,817,634,323,140,868,63,557,374,191,791,608,297,242,114,842,531,37, 348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190,790,607,296, 241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61,555,372, 189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320, 137,865,60,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579,268, 213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344,33, 161,889,84,578,267,212,812,7,629,318,135,863,58,552,369,186,786,603,292,237, 109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368,185, 785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316,133, 861,56,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264,209, 809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29,157, 885,80,574,263,208,808,625,314,3,131,859,548,54,365,182,782,599,288,233,105, 833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181,781, 598,287,232,104,832,521,338,27,155,883,78,572,261,206] [views:debug,2014-08-19T16:51:19.987,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/52. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:19.988,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",52,active,0} [ns_server:debug,2014-08-19T16:51:20.162,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 50. Nacking mccouch update. [views:debug,2014-08-19T16:51:20.162,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/50. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.162,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",50,active,0} [ns_server:debug,2014-08-19T16:51:20.164,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,351,168,91,768,585,274,219,819,636,325,142,870,806,65,623,559,376, 312,193,129,1,857,793,610,546,52,363,299,244,180,116,844,780,597,533,39,350, 286,231,167,103,90,895,831,584,520,337,273,218,154,882,818,77,635,571,324, 260,205,141,13,869,64,558,375,192,792,609,298,243,115,843,532,349,166,894,89, 583,272,217,817,634,323,140,868,63,557,374,191,791,608,297,242,114,842,531, 37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190,790,607, 296,241,113,841,530,347,164,892,87,581,270,215,815,632,321,138,866,61,555, 372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214,9,814,631, 320,137,865,60,554,371,188,788,605,294,239,111,839,528,345,162,890,85,579, 268,213,813,630,319,136,864,59,553,370,187,787,604,293,238,110,838,527,344, 33,161,889,84,578,267,212,812,7,629,318,135,863,58,552,369,186,786,603,292, 237,109,837,526,343,160,888,83,577,266,211,811,628,317,134,862,57,551,368, 185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210,810,627,5,316, 133,861,56,550,367,184,784,601,290,235,107,835,524,341,158,886,81,575,264, 209,809,626,315,132,860,55,549,366,183,783,600,289,234,106,834,523,340,29, 157,885,80,574,263,208,808,625,314,3,131,859,548,54,365,182,782,599,288,233, 105,833,522,339,156,884,79,573,262,207,807,624,313,130,858,547,53,364,181, 781,598,287,232,104,832,521,338,27,155,883,78,572,261,206] [views:debug,2014-08-19T16:51:20.238,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/50. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.238,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",50,active,0} [ns_server:debug,2014-08-19T16:51:20.413,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 48. Nacking mccouch update. [views:debug,2014-08-19T16:51:20.413,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/48. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.413,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",48,active,0} [ns_server:debug,2014-08-19T16:51:20.414,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,857, 793,610,546,52,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,90, 895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64, 558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634, 323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582, 271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530, 347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295, 240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554, 371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213,813,630,319, 136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267, 212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160, 888,83,577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108, 836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184, 784,601,290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860, 55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808, 625,314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79, 573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521, 338,27,155,883,78,572,261,206,806,623,312,129,1] [views:debug,2014-08-19T16:51:20.488,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/48. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.488,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",48,active,0} [ns_server:debug,2014-08-19T16:51:20.580,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 46. Nacking mccouch update. [views:debug,2014-08-19T16:51:20.580,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/46. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.581,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",46,active,0} [ns_server:debug,2014-08-19T16:51:20.582,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,857, 793,610,546,52,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,90, 895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64, 558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634, 323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582, 271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530, 347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295, 240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554, 371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213,813,630,319, 136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267, 212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160, 888,83,577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108, 836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184, 784,601,290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860, 55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808, 625,314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79, 573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521, 338,27,155,883,78,572,261,206,806,623,312,129,1] [views:debug,2014-08-19T16:51:20.656,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/46. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.656,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",46,active,0} [ns_server:debug,2014-08-19T16:51:20.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 44. Nacking mccouch update. [views:debug,2014-08-19T16:51:20.753,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/44. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.753,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",44,active,0} [ns_server:debug,2014-08-19T16:51:20.754,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,857, 793,610,546,52,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,90, 895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64, 558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634, 323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582, 271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530, 347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295, 240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554, 371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213,813,630,319, 136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267, 212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160, 888,83,577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108, 836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184, 784,601,290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860, 55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808, 625,314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79, 573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521, 338,27,155,883,78,572,261,206,806,623,312,129,1] [views:debug,2014-08-19T16:51:20.787,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/44. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.787,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",44,active,0} [ns_server:debug,2014-08-19T16:51:20.862,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 42. Nacking mccouch update. [views:debug,2014-08-19T16:51:20.862,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/42. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.862,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",42,active,0} [ns_server:debug,2014-08-19T16:51:20.863,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,857, 793,610,546,52,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,90, 895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64, 558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634, 323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582, 271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530, 347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295, 240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554, 371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213,813,630,319, 136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267, 212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160, 888,83,577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108, 836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184, 784,601,290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860, 55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808, 625,314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79, 573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521, 338,27,155,883,78,572,261,206,806,623,312,129,1] [views:debug,2014-08-19T16:51:20.896,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/42. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.896,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",42,active,0} [ns_server:debug,2014-08-19T16:51:20.971,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 40. Nacking mccouch update. [views:debug,2014-08-19T16:51:20.971,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/40. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:20.971,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",40,active,0} [ns_server:debug,2014-08-19T16:51:20.973,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,857, 793,610,546,52,363,299,244,180,116,844,780,597,533,39,350,286,231,167,103,90, 895,831,584,520,337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64, 558,375,192,792,609,298,243,115,843,532,349,166,894,89,583,272,217,817,634, 323,140,868,63,557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582, 271,216,816,633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530, 347,164,892,87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295, 240,112,840,529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554, 371,188,788,605,294,239,111,839,528,345,162,890,85,579,268,213,813,630,319, 136,864,59,553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267, 212,812,7,629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160, 888,83,577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108, 836,525,342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184, 784,601,290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860, 55,549,366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808, 625,314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79, 573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521, 338,27,155,883,78,572,261,206,806,623,312,129,1] [views:debug,2014-08-19T16:51:21.005,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/40. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.005,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",40,active,0} [ns_server:debug,2014-08-19T16:51:21.080,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 38. Nacking mccouch update. [views:debug,2014-08-19T16:51:21.080,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/38. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.080,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",38,active,0} [ns_server:debug,2014-08-19T16:51:21.082,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792, 609,298,243,115,843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816, 633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530,347,164,892,87, 581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529, 35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788,605, 294,239,111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59,553, 370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7,629, 318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160,888,83,577, 266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342, 31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184,784,601,290, 235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860,55,549,366, 183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808,625,314,3, 131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79,573,262, 207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521,338,27, 155,883,78,572,261,206,806,623,312,129,1,857,546,52,363,180] [rebalance:info,2014-08-19T16:51:21.113,ns_1@10.242.238.88:<0.18783.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 634 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:21.113,ns_1@10.242.238.88:<0.18447.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 383 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.114,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 634 state to active [views:debug,2014-08-19T16:51:21.114,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/38. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.114,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",38,active,0} [rebalance:info,2014-08-19T16:51:21.115,ns_1@10.242.238.88:<0.18783.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 634 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:21.115,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 383 state to active [rebalance:info,2014-08-19T16:51:21.117,ns_1@10.242.238.88:<0.18447.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 383 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.118,ns_1@10.242.238.88:<0.18783.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.118,ns_1@10.242.238.88:<0.18447.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.179,ns_1@10.242.238.88:<0.18706.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 635 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:21.179,ns_1@10.242.238.88:<0.18643.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 636 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.179,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 635 state to active [rebalance:info,2014-08-19T16:51:21.180,ns_1@10.242.238.88:<0.18706.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 635 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:21.180,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 636 state to active [rebalance:info,2014-08-19T16:51:21.181,ns_1@10.242.238.88:<0.18643.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 636 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.182,ns_1@10.242.238.88:<0.18706.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.182,ns_1@10.242.238.88:<0.18643.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:21.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 36. Nacking mccouch update. [views:debug,2014-08-19T16:51:21.206,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/36. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.206,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",36,active,0} [ns_server:debug,2014-08-19T16:51:21.208,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792, 609,298,243,115,843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816, 633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892, 87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840, 529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788, 605,294,239,111,839,528,345,162,890,85,579,268,213,813,630,319,136,864,59, 553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7, 629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160,888,83, 577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525, 342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184,784,601, 290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860,55,549, 366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808,625, 314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79,573, 262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521,338, 27,155,883,78,572,261,206,806,623,312,129,1,857,546,52,363,180] [views:debug,2014-08-19T16:51:21.290,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/36. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.290,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",36,active,0} [rebalance:info,2014-08-19T16:51:21.301,ns_1@10.242.238.88:<0.18566.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 637 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:21.301,ns_1@10.242.238.88:<0.18489.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 638 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.302,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 637 state to active [rebalance:info,2014-08-19T16:51:21.303,ns_1@10.242.238.88:<0.18566.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 637 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:21.303,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 638 state to active [rebalance:info,2014-08-19T16:51:21.304,ns_1@10.242.238.88:<0.18489.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 638 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.304,ns_1@10.242.238.88:<0.18566.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.305,ns_1@10.242.238.88:<0.18489.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.451,ns_1@10.242.238.88:<0.18390.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 639 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:21.451,ns_1@10.242.238.88:<0.18748.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 890 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.452,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 639 state to active [rebalance:info,2014-08-19T16:51:21.453,ns_1@10.242.238.88:<0.18390.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 639 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:21.453,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 890 state to active [rebalance:info,2014-08-19T16:51:21.454,ns_1@10.242.238.88:<0.18748.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 890 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.454,ns_1@10.242.238.88:<0.18390.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.455,ns_1@10.242.238.88:<0.18748.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:21.465,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 34. Nacking mccouch update. [views:debug,2014-08-19T16:51:21.465,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/34. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.466,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",34,active,0} [ns_server:debug,2014-08-19T16:51:21.467,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792, 609,298,243,115,843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816, 633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892, 87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840, 529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788, 605,294,239,111,839,528,345,34,162,890,85,579,268,213,813,630,319,136,864,59, 553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7, 629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,160,888,83, 577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525, 342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184,784,601, 290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860,55,549, 366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808,625, 314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79,573, 262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521,338, 27,155,883,78,572,261,206,806,623,312,129,1,857,546,52,363,180] [views:debug,2014-08-19T16:51:21.541,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/34. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.541,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",34,active,0} [rebalance:info,2014-08-19T16:51:21.585,ns_1@10.242.238.88:<0.18608.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 892 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:21.585,ns_1@10.242.238.88:<0.18685.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 891 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.585,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 892 state to active [rebalance:info,2014-08-19T16:51:21.586,ns_1@10.242.238.88:<0.18608.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 892 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:21.587,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 891 state to active [rebalance:info,2014-08-19T16:51:21.588,ns_1@10.242.238.88:<0.18685.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 891 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.588,ns_1@10.242.238.88:<0.18608.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.588,ns_1@10.242.238.88:<0.18685.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:21.682,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 32. Nacking mccouch update. [views:debug,2014-08-19T16:51:21.683,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/32. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.683,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",32,active,0} [ns_server:debug,2014-08-19T16:51:21.684,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792, 609,298,243,115,843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816, 633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892, 87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840, 529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788, 605,294,239,111,839,528,345,34,162,890,85,579,268,213,813,630,319,136,864,59, 553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7, 629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,32,160,888,83, 577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525, 342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184,784,601, 290,235,107,835,524,341,158,886,81,575,264,209,809,626,315,132,860,55,549, 366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808,625, 314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79,573, 262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521,338, 27,155,883,78,572,261,206,806,623,312,129,1,857,546,52,363,180] [rebalance:info,2014-08-19T16:51:21.685,ns_1@10.242.238.88:<0.18468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 894 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:21.686,ns_1@10.242.238.88:<0.18545.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 893 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.686,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 894 state to active [rebalance:info,2014-08-19T16:51:21.687,ns_1@10.242.238.88:<0.18468.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 894 on ns_1@10.242.238.88 
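Every entry in the stream above uses the same bracketed header (component:level, timestamp, node:process<pid>:module:function:line) followed by a free-form message, including the long "Usable vbuckets" payloads. A minimal sketch for splitting such a flattened stream back into individual entries, written in Python with a hypothetical split_entries helper rather than any Couchbase tooling, could look like this:

    import re

    # Header layout as observed in the entries above:
    # [component:level,timestamp,node:process<pid>:module:function:line]message
    HEADER = re.compile(
        r"\[(?P<component>[a-z_]+):(?P<level>[a-z]+),"
        r"(?P<ts>\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}\.\d+),"
        r"(?P<origin>[^\]]+)\]"
    )

    def split_entries(text):
        """Yield (header_fields, message) for each log entry found in text."""
        matches = list(HEADER.finditer(text))
        for i, m in enumerate(matches):
            end = matches[i + 1].start() if i + 1 < len(matches) else len(text)
            yield m.groupdict(), text[m.end():end].strip()

Feeding one of the runs above through split_entries yields one record per bracketed header, with each "Usable vbuckets" list kept intact as a single message string.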
[ns_server:info,2014-08-19T16:51:21.687,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 893 state to active [rebalance:info,2014-08-19T16:51:21.688,ns_1@10.242.238.88:<0.18545.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 893 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.689,ns_1@10.242.238.88:<0.18468.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.689,ns_1@10.242.238.88:<0.18545.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:21.758,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/32. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.758,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",32,active,0} [rebalance:info,2014-08-19T16:51:21.844,ns_1@10.242.238.88:<0.18426.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 895 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.845,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 895 state to active [rebalance:info,2014-08-19T16:51:21.846,ns_1@10.242.238.88:<0.18426.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 895 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.846,ns_1@10.242.238.88:<0.18426.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:21.919,ns_1@10.242.238.88:<0.19883.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 364 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.920,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 364 state to active [rebalance:info,2014-08-19T16:51:21.921,ns_1@10.242.238.88:<0.19883.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 364 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.922,ns_1@10.242.238.88:<0.19883.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:21.943,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 30. Nacking mccouch update. [views:debug,2014-08-19T16:51:21.943,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/30. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:21.943,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",30,active,0} [ns_server:debug,2014-08-19T16:51:21.945,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,780,597,533,39,350,286,231,167,103,90,895,831,584,520, 337,273,218,154,882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792, 609,298,243,115,843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816, 633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892, 87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840, 529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788, 605,294,239,111,839,528,345,34,162,890,85,579,268,213,813,630,319,136,864,59, 553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7, 629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,32,160,888,83, 577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525, 342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184,784,601, 290,235,107,835,524,341,30,158,886,81,575,264,209,809,626,315,132,860,55,549, 366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808,625, 314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,156,884,79,573, 262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521,338, 27,155,883,78,572,261,206,806,623,312,129,1,857,546,52,363,180] [rebalance:info,2014-08-19T16:51:21.986,ns_1@10.242.238.88:<0.19704.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 366 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:21.987,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 366 state to active [rebalance:info,2014-08-19T16:51:21.988,ns_1@10.242.238.88:<0.19704.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 366 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:21.988,ns_1@10.242.238.88:<0.19704.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[views:debug,2014-08-19T16:51:22.027,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/30. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.027,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",30,active,0} [rebalance:info,2014-08-19T16:51:22.061,ns_1@10.242.238.88:<0.19563.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 368 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.062,ns_1@10.242.238.88:<0.20032.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 363 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.062,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 368 state to active [rebalance:info,2014-08-19T16:51:22.063,ns_1@10.242.238.88:<0.19563.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 368 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:22.063,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 363 state to active [rebalance:info,2014-08-19T16:51:22.064,ns_1@10.242.238.88:<0.20032.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 363 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.065,ns_1@10.242.238.88:<0.19563.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.065,ns_1@10.242.238.88:<0.20032.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.196,ns_1@10.242.238.88:<0.19409.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 370 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.196,ns_1@10.242.238.88:<0.19796.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 365 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.196,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 370 state to active [rebalance:info,2014-08-19T16:51:22.197,ns_1@10.242.238.88:<0.19409.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 370 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:22.197,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 365 state to active [rebalance:info,2014-08-19T16:51:22.198,ns_1@10.242.238.88:<0.19796.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 365 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.198,ns_1@10.242.238.88:<0.19409.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.199,ns_1@10.242.238.88:<0.19796.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:22.202,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 28. Nacking mccouch update. [views:debug,2014-08-19T16:51:22.202,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/28. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.202,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",28,active,0} [ns_server:debug,2014-08-19T16:51:22.204,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,533,39,350,167,90,895,831,584,520,337,273,218,154,882, 818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115,843, 532,38,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608, 297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62, 556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528, 345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552, 369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576, 265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341, 30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365, 182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313, 130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261, 206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103] [views:debug,2014-08-19T16:51:22.290,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/28. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.290,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",28,active,0} [rebalance:info,2014-08-19T16:51:22.327,ns_1@10.242.238.88:<0.19269.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 372 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.328,ns_1@10.242.238.88:<0.19640.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 367 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.328,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 372 state to active [rebalance:info,2014-08-19T16:51:22.329,ns_1@10.242.238.88:<0.19269.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 372 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:22.329,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 367 state to active [rebalance:info,2014-08-19T16:51:22.330,ns_1@10.242.238.88:<0.19640.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 367 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.331,ns_1@10.242.238.88:<0.19269.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.331,ns_1@10.242.238.88:<0.19640.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:22.390,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 26. Nacking mccouch update. [views:debug,2014-08-19T16:51:22.390,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/26. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.390,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",26,active,0} [ns_server:debug,2014-08-19T16:51:22.391,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,533,39,350,167,90,895,831,584,520,337,273,26,218,154,882, 818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115,843, 532,38,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608, 297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62, 556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528, 345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552, 369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576, 265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341, 30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365, 182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313, 130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261, 206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103] [rebalance:info,2014-08-19T16:51:22.394,ns_1@10.242.238.88:<0.19111.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 374 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.394,ns_1@10.242.238.88:<0.19486.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 369 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.395,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 374 state to active [rebalance:info,2014-08-19T16:51:22.396,ns_1@10.242.238.88:<0.19111.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 374 on ns_1@10.242.238.88 
[ns_server:info,2014-08-19T16:51:22.396,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 369 state to active [rebalance:info,2014-08-19T16:51:22.397,ns_1@10.242.238.88:<0.19486.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 369 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.397,ns_1@10.242.238.88:<0.19111.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.398,ns_1@10.242.238.88:<0.19486.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:22.424,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/26. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.425,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",26,active,0} [rebalance:info,2014-08-19T16:51:22.462,ns_1@10.242.238.88:<0.19346.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 371 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.462,ns_1@10.242.238.88:<0.18944.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 376 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.462,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 371 state to active [rebalance:info,2014-08-19T16:51:22.463,ns_1@10.242.238.88:<0.19346.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 371 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:22.464,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 376 state to active [rebalance:info,2014-08-19T16:51:22.465,ns_1@10.242.238.88:<0.18944.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 376 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.465,ns_1@10.242.238.88:<0.19346.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.465,ns_1@10.242.238.88:<0.18944.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:22.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 24. Nacking mccouch update. [views:debug,2014-08-19T16:51:22.499,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/24. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.499,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",24,active,0} [ns_server:debug,2014-08-19T16:51:22.500,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,150,878,73,567,256, 201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515,332,21, 149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774,591,280, 225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539,45,356, 173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615,304,249, 121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563,380,197, 797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639,328,17,145, 873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770,587,276,221, 821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535,41,352,169,92, 769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611,300,245,117, 845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559,376,193,793, 610,299,244,116,844,533,39,350,167,90,895,831,584,520,337,273,26,218,154,882, 818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115,843, 532,38,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608, 297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62, 556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528, 345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552, 369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576, 265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341, 30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365, 182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313, 130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261, 206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103] [rebalance:info,2014-08-19T16:51:22.529,ns_1@10.242.238.88:<0.19191.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 373 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.529,ns_1@10.242.238.88:<0.18804.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 378 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.529,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 373 state to active [rebalance:info,2014-08-19T16:51:22.530,ns_1@10.242.238.88:<0.19191.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 373 on ns_1@10.242.238.88 
[ns_server:info,2014-08-19T16:51:22.530,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 378 state to active [rebalance:info,2014-08-19T16:51:22.531,ns_1@10.242.238.88:<0.18804.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 378 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.532,ns_1@10.242.238.88:<0.19191.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.532,ns_1@10.242.238.88:<0.18804.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:22.533,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/24. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.533,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",24,active,0} [rebalance:info,2014-08-19T16:51:22.596,ns_1@10.242.238.88:<0.19026.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 375 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.596,ns_1@10.242.238.88:<0.21214.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 380) [ns_server:info,2014-08-19T16:51:22.596,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 375 state to active [rebalance:info,2014-08-19T16:51:22.597,ns_1@10.242.238.88:<0.18664.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:22.597,ns_1@10.242.238.88:<0.19026.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 375 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.598,ns_1@10.242.238.88:<0.19026.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:22.600,ns_1@10.242.238.88:<0.18672.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_380_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:22.600,ns_1@10.242.238.88:<0.18664.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:22.603,ns_1@10.242.238.88:<0.18664.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 380 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21221.1> [ns_server:info,2014-08-19T16:51:22.604,ns_1@10.242.238.88:<0.21221.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 380 to state replica [ns_server:debug,2014-08-19T16:51:22.608,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 22. Nacking mccouch update. [views:debug,2014-08-19T16:51:22.608,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/22. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.608,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",22,active,0} [ns_server:debug,2014-08-19T16:51:22.609,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,148,876,71,565,382,199,799,616,305,250,122,850,539, 45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798,615, 304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69,563, 380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822,639, 328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93,770, 587,276,221,821,638,327,144,872,67,561,378,195,795,612,301,246,118,846,535, 41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194,794,611, 300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65,559, 376,193,793,610,299,244,116,844,533,39,350,167,90,895,831,584,520,337,273,26, 218,154,882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298, 243,115,843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374, 191,791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322, 139,11,867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581, 270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35, 346,163,891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294, 239,111,839,528,345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370, 187,787,604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318, 135,863,58,552,369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266, 211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31, 159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235, 107,835,524,341,30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183, 783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131, 859,548,54,365,182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207, 807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155, 883,78,572,261,206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103] [rebalance:info,2014-08-19T16:51:22.629,ns_1@10.242.238.88:<0.18881.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 377 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.629,ns_1@10.242.238.88:<0.21222.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 382) [ns_server:info,2014-08-19T16:51:22.630,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 377 state to active [ns_server:debug,2014-08-19T16:51:22.630,ns_1@10.242.238.88:<0.21221.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_380 
[rebalance:info,2014-08-19T16:51:22.630,ns_1@10.242.238.88:<0.18524.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:22.631,ns_1@10.242.238.88:<0.18881.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 377 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.631,ns_1@10.242.238.88:<0.18881.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.632,ns_1@10.242.238.88:<0.21221.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[380]}, {checkpoints,[{380,1}]}, {name,<<"rebalance_380">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[380]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"380"}]} [rebalance:debug,2014-08-19T16:51:22.632,ns_1@10.242.238.88:<0.21221.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21227.1> [rebalance:info,2014-08-19T16:51:22.633,ns_1@10.242.238.88:<0.21221.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:22.634,ns_1@10.242.238.88:<0.18532.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_382_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:22.634,ns_1@10.242.238.88:<0.18524.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:debug,2014-08-19T16:51:22.635,ns_1@10.242.238.88:<0.21221.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:22.635,ns_1@10.242.238.88:<0.21221.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:22.636,ns_1@10.242.238.88:<0.18664.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 380 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:22.638,ns_1@10.242.238.88:<0.18524.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 382 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21230.1> [rebalance:debug,2014-08-19T16:51:22.638,ns_1@10.242.238.88:<0.18672.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:22.638,ns_1@10.242.238.88:<0.21230.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 382 to state replica [views:debug,2014-08-19T16:51:22.644,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/22. 
Updated state: active (0) [ns_server:info,2014-08-19T16:51:22.644,ns_1@10.242.238.88:<0.18672.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_380_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:22.644,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",22,active,0} [rebalance:info,2014-08-19T16:51:22.645,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 380 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:22.645,ns_1@10.242.238.88:<0.21234.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 380 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [rebalance:info,2014-08-19T16:51:22.663,ns_1@10.242.238.88:<0.18727.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 379 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:22.663,ns_1@10.242.238.88:<0.19855.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 620 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.663,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 379 state to active [rebalance:info,2014-08-19T16:51:22.665,ns_1@10.242.238.88:<0.18727.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 379 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:22.665,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 620 state to active [rebalance:info,2014-08-19T16:51:22.666,ns_1@10.242.238.88:<0.19855.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 620 on ns_1@10.242.238.88 [ns_server:debug,2014-08-19T16:51:22.666,ns_1@10.242.238.88:<0.21230.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_382 [rebalance:info,2014-08-19T16:51:22.666,ns_1@10.242.238.88:<0.18727.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.667,ns_1@10.242.238.88:<0.19855.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:22.668,ns_1@10.242.238.88:<0.21230.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[382]}, {checkpoints,[{382,1}]}, {name,<<"rebalance_382">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[382]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"382"}]} [rebalance:debug,2014-08-19T16:51:22.668,ns_1@10.242.238.88:<0.21230.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21243.1> [rebalance:info,2014-08-19T16:51:22.669,ns_1@10.242.238.88:<0.21230.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:22.671,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:22.671,ns_1@10.242.238.88:<0.21230.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:22.672,ns_1@10.242.238.88:<0.21230.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[ns_server:debug,2014-08-19T16:51:22.672,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{380, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:22.673,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:22.673,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:22.673,ns_1@10.242.238.88:<0.18524.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 382 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:22.673,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:22.674,ns_1@10.242.238.88:<0.18532.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:22.678,ns_1@10.242.238.88:<0.18532.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_382_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:22.684,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 380 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:22.685,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 380) [ns_server:debug,2014-08-19T16:51:22.686,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:22.686,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 382 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:22.686,ns_1@10.242.238.88:<0.21256.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 382 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:22.704,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.704,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:22.705,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.705,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{382, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:22.705,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:22.710,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 382 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:22.711,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 382) [ns_server:debug,2014-08-19T16:51:22.712,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:51:22.743,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 20. Nacking mccouch update. [views:debug,2014-08-19T16:51:22.744,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/20. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.744,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",20,active,0} [ns_server:debug,2014-08-19T16:51:22.745,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,146,874,69, 563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,144,872,67,561,378,195,795,612,301,246,118,846, 535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194,794, 611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65, 559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,831,584,520,337, 273,26,218,154,882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792, 609,298,243,115,843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63, 557,374,191,791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816, 633,322,139,11,867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892, 87,581,270,215,815,632,321,138,866,61,555,372,189,789,606,295,240,112,840, 529,35,346,163,891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788, 605,294,239,111,839,528,345,34,162,890,85,579,268,213,813,630,319,136,864,59, 553,370,187,787,604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7, 629,318,135,863,58,552,369,186,786,603,292,237,109,837,526,343,32,160,888,83, 577,266,211,811,628,317,134,862,57,551,368,185,785,602,291,236,108,836,525, 342,31,159,887,82,576,265,210,810,627,5,316,133,861,56,550,367,184,784,601, 290,235,107,835,524,341,30,158,886,81,575,264,209,809,626,315,132,860,55,549, 366,183,783,600,289,234,106,834,523,340,29,157,885,80,574,263,208,808,625, 314,3,131,859,548,54,365,182,782,599,288,233,105,833,522,339,28,156,884,79, 573,262,207,807,624,313,130,858,547,53,364,181,781,598,287,232,104,832,521, 338,27,155,883,78,572,261,206,806,623,312,129,1,857,546,52,363,180,780,597, 286,231,103] [views:debug,2014-08-19T16:51:22.777,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/20. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.777,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",20,active,0} [rebalance:info,2014-08-19T16:51:22.790,ns_1@10.242.238.88:<0.21280.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 381) [rebalance:info,2014-08-19T16:51:22.790,ns_1@10.242.238.88:<0.19682.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 622 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.791,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 622 state to active [rebalance:info,2014-08-19T16:51:22.791,ns_1@10.242.238.88:<0.18587.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:22.792,ns_1@10.242.238.88:<0.19682.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 622 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.792,ns_1@10.242.238.88:<0.19682.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:22.794,ns_1@10.242.238.88:<0.18595.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_381_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:22.794,ns_1@10.242.238.88:<0.18587.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:22.797,ns_1@10.242.238.88:<0.18587.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 381 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21287.1> [ns_server:info,2014-08-19T16:51:22.798,ns_1@10.242.238.88:<0.21287.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 381 to state replica [ns_server:debug,2014-08-19T16:51:22.822,ns_1@10.242.238.88:<0.21287.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_381 [rebalance:info,2014-08-19T16:51:22.823,ns_1@10.242.238.88:<0.21287.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[381]}, {checkpoints,[{381,1}]}, {name,<<"rebalance_381">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[381]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"381"}]} [rebalance:debug,2014-08-19T16:51:22.824,ns_1@10.242.238.88:<0.21287.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21296.1> [rebalance:info,2014-08-19T16:51:22.825,ns_1@10.242.238.88:<0.21287.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:22.827,ns_1@10.242.238.88:<0.21287.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:22.827,ns_1@10.242.238.88:<0.21287.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:22.828,ns_1@10.242.238.88:<0.18587.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 381 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:22.830,ns_1@10.242.238.88:<0.18595.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:info,2014-08-19T16:51:22.833,ns_1@10.242.238.88:<0.18595.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_381_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:22.834,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 381 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:22.835,ns_1@10.242.238.88:<0.21306.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 381 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:22.859,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.860,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{381, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:22.860,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:22.860,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.860,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:22.873,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 381 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:22.874,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 381) [ns_server:debug,2014-08-19T16:51:22.874,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:22.880,ns_1@10.242.238.88:<0.21316.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 383) [rebalance:info,2014-08-19T16:51:22.880,ns_1@10.242.238.88:<0.19542.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 624 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:22.880,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 624 state to active [rebalance:info,2014-08-19T16:51:22.881,ns_1@10.242.238.88:<0.18447.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:22.882,ns_1@10.242.238.88:<0.19542.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 624 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:22.882,ns_1@10.242.238.88:<0.19542.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:22.884,ns_1@10.242.238.88:<0.18455.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_383_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:22.884,ns_1@10.242.238.88:<0.18447.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:22.886,ns_1@10.242.238.88:<0.18447.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 383 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21323.1> [ns_server:info,2014-08-19T16:51:22.887,ns_1@10.242.238.88:<0.21323.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 383 to state replica [ns_server:debug,2014-08-19T16:51:22.910,ns_1@10.242.238.88:<0.21323.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_383 [ns_server:debug,2014-08-19T16:51:22.911,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 18. Nacking mccouch update. [views:debug,2014-08-19T16:51:22.911,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/18. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.911,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",18,active,0} [rebalance:info,2014-08-19T16:51:22.911,ns_1@10.242.238.88:<0.21323.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[383]}, {checkpoints,[{383,1}]}, {name,<<"rebalance_383">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[383]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"383"}]} [rebalance:debug,2014-08-19T16:51:22.912,ns_1@10.242.238.88:<0.21323.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21324.1> [rebalance:info,2014-08-19T16:51:22.913,ns_1@10.242.238.88:<0.21323.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:22.913,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 
639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,144,872,67,561,378,195,795,612,301,246,118,846, 535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194,794, 611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,870,65, 559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218,882, 818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115,843, 532,38,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791,608, 297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62, 556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528, 345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552, 369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576, 265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341, 30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365, 182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313, 130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261, 206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26, 154] [rebalance:debug,2014-08-19T16:51:22.915,ns_1@10.242.238.88:<0.21323.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:22.915,ns_1@10.242.238.88:<0.21323.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:22.915,ns_1@10.242.238.88:<0.18447.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 383 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:22.918,ns_1@10.242.238.88:<0.18455.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:22.921,ns_1@10.242.238.88:<0.18455.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_383_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:22.921,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 383 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:22.922,ns_1@10.242.238.88:<0.21328.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 383 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:debug,2014-08-19T16:51:22.950,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.951,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:22.951,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.951,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:22.952,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:22.949,ns_1@10.242.238.88:<0.25608.0>:ns_rebalance_observer:docs_left_updater_loop:347]Starting docs_left_updater_loop:"default" [{move_state,874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_874_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_874_'ns_1@10.242.238.89'">>}]}, {move_state,363, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_363_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_363_'ns_1@10.242.238.90'">>}]}, {move_state,619, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_619_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_619_'ns_1@10.242.238.89'">>}]}, {move_state,875, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_875_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_875_'ns_1@10.242.238.89'">>}]}, {move_state,364, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_364_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_364_'ns_1@10.242.238.90'">>}]}, {move_state,620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_620_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_620_'ns_1@10.242.238.89'">>}]}, {move_state,876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_876_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_876_'ns_1@10.242.238.89'">>}]}, {move_state,365, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_365_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, 
<<"replication_building_365_'ns_1@10.242.238.90'">>}]}, {move_state,621, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_621_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_621_'ns_1@10.242.238.89'">>}]}, {move_state,877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_877_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_877_'ns_1@10.242.238.89'">>}]}, {move_state,366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_366_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_366_'ns_1@10.242.238.90'">>}]}, {move_state,622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_622_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_622_'ns_1@10.242.238.89'">>}]}, {move_state,878, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_878_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_878_'ns_1@10.242.238.89'">>}]}, {move_state,367, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_367_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_367_'ns_1@10.242.238.90'">>}]}, {move_state,623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_623_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_623_'ns_1@10.242.238.89'">>}]}, {move_state,879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_879_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_879_'ns_1@10.242.238.89'">>}]}, {move_state,368, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_368_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_368_'ns_1@10.242.238.90'">>}]}, {move_state,624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_624_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_624_'ns_1@10.242.238.89'">>}]}, {move_state,880, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_880_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_880_'ns_1@10.242.238.89'">>}]}, {move_state,369, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], 
[{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_369_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_369_'ns_1@10.242.238.90'">>}]}, {move_state,625, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_625_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_625_'ns_1@10.242.238.89'">>}]}, {move_state,881, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_881_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_881_'ns_1@10.242.238.89'">>}]}, {move_state,370, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_370_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_370_'ns_1@10.242.238.90'">>}]}, {move_state,626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_626_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_626_'ns_1@10.242.238.89'">>}]}, {move_state,882, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_882_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_882_'ns_1@10.242.238.89'">>}]}, {move_state,371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_371_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_371_'ns_1@10.242.238.90'">>}]}, {move_state,627, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_627_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_627_'ns_1@10.242.238.89'">>}]}, {move_state,883, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_883_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_883_'ns_1@10.242.238.89'">>}]}, {move_state,372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_372_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_372_'ns_1@10.242.238.90'">>}]}, {move_state,628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_628_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_628_'ns_1@10.242.238.89'">>}]}, {move_state,884, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_884_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_884_'ns_1@10.242.238.89'">>}]}, {move_state,373, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_373_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_373_'ns_1@10.242.238.90'">>}]}, {move_state,629, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_629_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_629_'ns_1@10.242.238.89'">>}]}, {move_state,885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_885_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_885_'ns_1@10.242.238.89'">>}]}, {move_state,374, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_374_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_374_'ns_1@10.242.238.90'">>}]}, {move_state,630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_630_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_630_'ns_1@10.242.238.89'">>}]}, {move_state,886, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_886_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_886_'ns_1@10.242.238.89'">>}]}, {move_state,375, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_375_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_375_'ns_1@10.242.238.90'">>}]}, {move_state,631, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_631_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_631_'ns_1@10.242.238.89'">>}]}, {move_state,887, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_887_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_887_'ns_1@10.242.238.89'">>}]}, {move_state,376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_376_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_376_'ns_1@10.242.238.90'">>}]}, {move_state,632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_632_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_632_'ns_1@10.242.238.89'">>}]}, {move_state,888, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], 
[{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_888_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_888_'ns_1@10.242.238.89'">>}]}, {move_state,377, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_377_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_377_'ns_1@10.242.238.90'">>}]}, {move_state,633, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_633_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_633_'ns_1@10.242.238.89'">>}]}, {move_state,889, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_889_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_889_'ns_1@10.242.238.89'">>}]}, {move_state,378, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_378_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_378_'ns_1@10.242.238.90'">>}]}, {move_state,634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_634_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_634_'ns_1@10.242.238.89'">>}]}, {move_state,890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_890_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_890_'ns_1@10.242.238.89'">>}]}, {move_state,379, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_379_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_379_'ns_1@10.242.238.90'">>}]}, {move_state,635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_635_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_635_'ns_1@10.242.238.89'">>}]}, {move_state,891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_891_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_891_'ns_1@10.242.238.89'">>}]}, {move_state,636, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_636_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_636_'ns_1@10.242.238.89'">>}]}, {move_state,892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_892_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, 
<<"replication_building_892_'ns_1@10.242.238.89'">>}]}, {move_state,637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_637_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_637_'ns_1@10.242.238.89'">>}]}, {move_state,893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_893_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_893_'ns_1@10.242.238.89'">>}]}, {move_state,638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_638_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_638_'ns_1@10.242.238.89'">>}]}, {move_state,894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_894_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_894_'ns_1@10.242.238.89'">>}]}, {move_state,383, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90'], [{replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_383_'ns_1@10.242.238.89'">>}, {replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_383_'ns_1@10.242.238.90'">>}]}, {move_state,895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.91',0,0, <<"replication_building_895_'ns_1@10.242.238.91'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_895_'ns_1@10.242.238.89'">>}]}, {move_state,639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89'], [{replica_building_stats,'ns_1@10.242.238.90',0,0, <<"replication_building_639_'ns_1@10.242.238.90'">>}, {replica_building_stats,'ns_1@10.242.238.89',0,0, <<"replication_building_639_'ns_1@10.242.238.89'">>}]}] [ns_server:debug,2014-08-19T16:51:22.955,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 874, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 363, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.956,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 619, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.957,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 875, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 364, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.958,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 620, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [rebalance:info,2014-08-19T16:51:22.959,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 383 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:22.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 876, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.959,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 383) [ns_server:debug,2014-08-19T16:51:22.960,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:51:22.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 365, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.961,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 621, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 877, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.962,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 366, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.963,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 622, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 878, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.964,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 367, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 623, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.965,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 879, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 368, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 624, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.966,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 880, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 369, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.967,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 625, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 881, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 370, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] 
[ns_server:debug,2014-08-19T16:51:22.968,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 626, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 882, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.969,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 371, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 627, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.970,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 883, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 372, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.971,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 628, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 884, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 373, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.972,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 629, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 885, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.973,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 374, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 630, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.974,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 886, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 375, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 631, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.975,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 887, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 376, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.976,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 632, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] 
[ns_server:debug,2014-08-19T16:51:22.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 888, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.977,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 377, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 633, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 889, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.978,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 378, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 634, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.979,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 890, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 379, [{'ns_1@10.242.238.89',0},{'ns_1@10.242.238.90',0}] [ns_server:debug,2014-08-19T16:51:22.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 635, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.980,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 891, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 636, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.981,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 892, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 637, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.982,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 893, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 638, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.983,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 894, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 895, [{'ns_1@10.242.238.91',0},{'ns_1@10.242.238.89',0}] [ns_server:debug,2014-08-19T16:51:22.984,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_cast:110]Got update_stats: 639, [{'ns_1@10.242.238.90',0},{'ns_1@10.242.238.89',0}] [views:debug,2014-08-19T16:51:22.995,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/18. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:22.995,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",18,active,0} [rebalance:info,2014-08-19T16:51:23.097,ns_1@10.242.238.88:<0.19388.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 626 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:23.097,ns_1@10.242.238.88:<0.20003.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 619 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:23.098,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 626 state to active [rebalance:info,2014-08-19T16:51:23.099,ns_1@10.242.238.88:<0.19388.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 626 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:23.099,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 619 state to active [rebalance:info,2014-08-19T16:51:23.100,ns_1@10.242.238.88:<0.20003.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 619 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.100,ns_1@10.242.238.88:<0.19388.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:23.101,ns_1@10.242.238.88:<0.20003.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:23.178,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 16. Nacking mccouch update. [views:debug,2014-08-19T16:51:23.178,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/16. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.179,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",16,active,0} [ns_server:debug,2014-08-19T16:51:23.180,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115, 843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791, 608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867, 62,556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528, 345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552, 369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576, 265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341, 30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365, 182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313, 130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261, 206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26, 154] [rebalance:info,2014-08-19T16:51:23.247,ns_1@10.242.238.88:<0.19765.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 621 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:23.247,ns_1@10.242.238.88:<0.19248.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 628 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:23.248,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 621 state to active [rebalance:info,2014-08-19T16:51:23.249,ns_1@10.242.238.88:<0.19765.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 621 on 
ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:23.249,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 628 state to active [rebalance:info,2014-08-19T16:51:23.250,ns_1@10.242.238.88:<0.19248.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 628 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.250,ns_1@10.242.238.88:<0.19765.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:23.251,ns_1@10.242.238.88:<0.19248.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:23.262,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/16. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.262,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",16,active,0} [rebalance:info,2014-08-19T16:51:23.398,ns_1@10.242.238.88:<0.19090.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 630 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:23.398,ns_1@10.242.238.88:<0.19605.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 623 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:23.398,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 630 state to active [rebalance:info,2014-08-19T16:51:23.399,ns_1@10.242.238.88:<0.19090.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 630 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:23.399,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 623 state to active [rebalance:info,2014-08-19T16:51:23.400,ns_1@10.242.238.88:<0.19605.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 623 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.401,ns_1@10.242.238.88:<0.19090.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:23.401,ns_1@10.242.238.88:<0.19605.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:23.429,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 14. Nacking mccouch update. [views:debug,2014-08-19T16:51:23.429,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/14. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.430,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",14,active,0} [ns_server:debug,2014-08-19T16:51:23.431,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115, 843,532,38,349,166,894,89,583,272,217,817,634,323,140,868,63,557,374,191,791, 608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867, 62,556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815, 632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86, 580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528, 345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787,604,293, 238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552, 369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628, 317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576, 265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341, 30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234, 106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365, 182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313, 130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261, 206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26, 154] [views:debug,2014-08-19T16:51:23.505,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/14. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.505,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",14,active,0} [rebalance:info,2014-08-19T16:51:23.540,ns_1@10.242.238.88:<0.18923.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 632 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:23.540,ns_1@10.242.238.88:<0.19465.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 625 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:23.540,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 632 state to active [rebalance:info,2014-08-19T16:51:23.541,ns_1@10.242.238.88:<0.18923.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 632 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:23.541,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 625 state to active [rebalance:info,2014-08-19T16:51:23.542,ns_1@10.242.238.88:<0.19465.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 625 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.543,ns_1@10.242.238.88:<0.18923.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:23.543,ns_1@10.242.238.88:<0.19465.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:23.646,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 12. Nacking mccouch update. [views:debug,2014-08-19T16:51:23.647,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/12. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.647,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",12,active,0} [ns_server:debug,2014-08-19T16:51:23.648,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115, 843,532,38,349,166,894,89,583,272,217,817,634,323,140,12,868,63,557,374,191, 791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11, 867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215, 815,632,321,138,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163, 891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111, 839,528,345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787, 604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863, 58,552,369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811, 628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82, 576,265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524, 341,30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289, 234,106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54, 365,182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624, 313,130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572, 261,206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337, 26,154] [rebalance:info,2014-08-19T16:51:23.698,ns_1@10.242.238.88:<0.19325.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 627 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:23.698,ns_1@10.242.238.88:<0.21479.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 634) [ns_server:info,2014-08-19T16:51:23.699,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 627 state to active [rebalance:info,2014-08-19T16:51:23.699,ns_1@10.242.238.88:<0.18783.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:23.700,ns_1@10.242.238.88:<0.19325.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 627 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.701,ns_1@10.242.238.88:<0.19325.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:23.703,ns_1@10.242.238.88:<0.18791.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_634_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:23.703,ns_1@10.242.238.88:<0.18783.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:23.705,ns_1@10.242.238.88:<0.18783.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 634 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21486.1> [ns_server:info,2014-08-19T16:51:23.706,ns_1@10.242.238.88:<0.21486.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 634 to state replica [views:debug,2014-08-19T16:51:23.722,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/12. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.723,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",12,active,0} [ns_server:debug,2014-08-19T16:51:23.731,ns_1@10.242.238.88:<0.21486.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_634 [rebalance:info,2014-08-19T16:51:23.732,ns_1@10.242.238.88:<0.21486.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[634]}, {checkpoints,[{634,1}]}, {name,<<"rebalance_634">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[634]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"634"}]} [rebalance:debug,2014-08-19T16:51:23.733,ns_1@10.242.238.88:<0.21486.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21487.1> [rebalance:info,2014-08-19T16:51:23.734,ns_1@10.242.238.88:<0.21486.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:23.736,ns_1@10.242.238.88:<0.21486.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:23.736,ns_1@10.242.238.88:<0.21486.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:23.737,ns_1@10.242.238.88:<0.18783.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 634 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:23.739,ns_1@10.242.238.88:<0.18791.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:23.743,ns_1@10.242.238.88:<0.18791.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_634_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:23.743,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 634 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:23.743,ns_1@10.242.238.88:<0.21491.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 634 state 
change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:23.768,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.769,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.770,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:23.770,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.770,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{634, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:23.774,ns_1@10.242.238.88:<0.21500.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 636) [rebalance:info,2014-08-19T16:51:23.774,ns_1@10.242.238.88:<0.19161.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 629 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:23.774,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 629 state to active [rebalance:info,2014-08-19T16:51:23.775,ns_1@10.242.238.88:<0.18643.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:23.776,ns_1@10.242.238.88:<0.19161.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 629 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.776,ns_1@10.242.238.88:<0.19161.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:23.777,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 634 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:23.777,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 634) [ns_server:debug,2014-08-19T16:51:23.778,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:info,2014-08-19T16:51:23.781,ns_1@10.242.238.88:<0.18651.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_636_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:23.781,ns_1@10.242.238.88:<0.18643.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:23.784,ns_1@10.242.238.88:<0.18643.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 636 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21522.1> [ns_server:info,2014-08-19T16:51:23.784,ns_1@10.242.238.88:<0.21522.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 636 to state replica [ns_server:debug,2014-08-19T16:51:23.808,ns_1@10.242.238.88:<0.21522.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_636 [rebalance:info,2014-08-19T16:51:23.809,ns_1@10.242.238.88:<0.21522.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[636]}, {checkpoints,[{636,1}]}, {name,<<"rebalance_636">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[636]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"636"}]} [rebalance:debug,2014-08-19T16:51:23.810,ns_1@10.242.238.88:<0.21522.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21523.1> [rebalance:info,2014-08-19T16:51:23.811,ns_1@10.242.238.88:<0.21522.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:23.813,ns_1@10.242.238.88:<0.21522.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:23.813,ns_1@10.242.238.88:<0.21522.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:23.814,ns_1@10.242.238.88:<0.18643.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 636 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:23.816,ns_1@10.242.238.88:<0.18651.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:23.819,ns_1@10.242.238.88:<0.18651.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_636_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:23.820,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 636 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:23.820,ns_1@10.242.238.88:<0.21527.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 636 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:23.843,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.844,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{636, ['ns_1@10.242.238.88',undefined], 
['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:23.845,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:23.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.845,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:23.853,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 636 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:23.853,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 636) [ns_server:debug,2014-08-19T16:51:23.854,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [ns_server:debug,2014-08-19T16:51:23.856,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 10. Nacking mccouch update. [views:debug,2014-08-19T16:51:23.856,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/10. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.856,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",10,active,0} [rebalance:info,2014-08-19T16:51:23.857,ns_1@10.242.238.88:<0.21537.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 638) [rebalance:info,2014-08-19T16:51:23.857,ns_1@10.242.238.88:<0.19005.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 631 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:23.858,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 631 state to active [rebalance:info,2014-08-19T16:51:23.858,ns_1@10.242.238.88:<0.18489.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [ns_server:debug,2014-08-19T16:51:23.858,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 882,818,77,635,571,324,260,205,141,13,869,64,558,375,192,792,609,298,243,115, 843,532,38,349,166,894,89,583,272,217,817,634,323,140,12,868,63,557,374,191, 791,608,297,242,114,842,531,37,348,165,893,88,582,271,216,816,633,322,139,11, 867,62,556,373,190,790,607,296,241,113,841,530,36,347,164,892,87,581,270,215, 815,632,321,138,10,866,61,555,372,189,789,606,295,240,112,840,529,35,346,163, 891,86,580,269,214,9,814,631,320,137,865,60,554,371,188,788,605,294,239,111, 839,528,345,34,162,890,85,579,268,213,813,630,319,136,864,59,553,370,187,787, 604,293,238,110,838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863, 58,552,369,186,786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811, 628,317,134,862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82, 576,265,210,810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524, 341,30,158,886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289, 234,106,834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54, 365,182,782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624, 313,130,858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572, 261,206,806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337, 26,154] [rebalance:info,2014-08-19T16:51:23.861,ns_1@10.242.238.88:<0.19005.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 631 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.861,ns_1@10.242.238.88:<0.19005.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:23.861,ns_1@10.242.238.88:<0.18497.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_638_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:23.862,ns_1@10.242.238.88:<0.18489.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:debug,2014-08-19T16:51:23.864,ns_1@10.242.238.88:<0.18489.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 638 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21544.1> [ns_server:info,2014-08-19T16:51:23.864,ns_1@10.242.238.88:<0.21544.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 638 to state replica [ns_server:debug,2014-08-19T16:51:23.886,ns_1@10.242.238.88:<0.21544.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_638 [rebalance:info,2014-08-19T16:51:23.887,ns_1@10.242.238.88:<0.21544.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[638]}, {checkpoints,[{638,1}]}, {name,<<"rebalance_638">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[638]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"638"}]} [rebalance:debug,2014-08-19T16:51:23.888,ns_1@10.242.238.88:<0.21544.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21545.1> [rebalance:info,2014-08-19T16:51:23.889,ns_1@10.242.238.88:<0.21544.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:23.891,ns_1@10.242.238.88:<0.21544.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:23.891,ns_1@10.242.238.88:<0.21544.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:23.892,ns_1@10.242.238.88:<0.18489.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 638 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:23.894,ns_1@10.242.238.88:<0.18497.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:23.897,ns_1@10.242.238.88:<0.18497.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_638_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:23.897,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 638 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:23.897,ns_1@10.242.238.88:<0.21549.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 638 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:23.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.918,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:23.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:23.918,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{638, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [views:debug,2014-08-19T16:51:23.928,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/10. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:23.928,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",10,active,0} [rebalance:info,2014-08-19T16:51:23.930,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 638 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:23.930,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 638) [ns_server:debug,2014-08-19T16:51:23.931,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:23.941,ns_1@10.242.238.88:<0.20060.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 874 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:23.941,ns_1@10.242.238.88:<0.18846.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 633 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:23.941,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 874 state to active [rebalance:info,2014-08-19T16:51:23.943,ns_1@10.242.238.88:<0.20060.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 874 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:23.943,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 633 state to active [rebalance:info,2014-08-19T16:51:23.944,ns_1@10.242.238.88:<0.18846.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 633 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:23.944,ns_1@10.242.238.88:<0.20060.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:23.944,ns_1@10.242.238.88:<0.18846.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:24.003,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 8. Nacking mccouch update. [views:debug,2014-08-19T16:51:24.003,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/8. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.003,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",8,active,0} [ns_server:debug,2014-08-19T16:51:24.004,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 818,635,324,141,13,869,64,558,375,192,792,609,298,243,115,843,532,38,349,166, 894,89,583,272,217,817,634,323,140,12,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190, 790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815,632,321,138,10, 866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214, 9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528,345,34,162, 890,85,579,268,213,813,8,630,319,136,864,59,553,370,187,787,604,293,238,110, 838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552,369,186, 786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210, 810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341,30,158, 886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106, 834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365,182, 782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313,130, 858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261,206, 806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26,154, 882,77,571,260,205] [views:debug,2014-08-19T16:51:24.037,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/8. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.037,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",8,active,0} [rebalance:info,2014-08-19T16:51:24.065,ns_1@10.242.238.88:<0.19826.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 876 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.066,ns_1@10.242.238.88:<0.21581.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 635) [ns_server:info,2014-08-19T16:51:24.066,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 876 state to active [rebalance:info,2014-08-19T16:51:24.067,ns_1@10.242.238.88:<0.18706.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.067,ns_1@10.242.238.88:<0.19826.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 876 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.068,ns_1@10.242.238.88:<0.19826.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:24.070,ns_1@10.242.238.88:<0.18714.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_635_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.070,ns_1@10.242.238.88:<0.18706.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:24.073,ns_1@10.242.238.88:<0.18706.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 635 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21593.1> [ns_server:info,2014-08-19T16:51:24.074,ns_1@10.242.238.88:<0.21593.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 635 to state replica [ns_server:debug,2014-08-19T16:51:24.096,ns_1@10.242.238.88:<0.21593.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_635 [rebalance:info,2014-08-19T16:51:24.098,ns_1@10.242.238.88:<0.21593.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[635]}, {checkpoints,[{635,1}]}, {name,<<"rebalance_635">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[635]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"635"}]} [rebalance:debug,2014-08-19T16:51:24.099,ns_1@10.242.238.88:<0.21593.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21603.1> [rebalance:info,2014-08-19T16:51:24.099,ns_1@10.242.238.88:<0.19661.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 878 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.099,ns_1@10.242.238.88:<0.21604.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 637) [ns_server:info,2014-08-19T16:51:24.099,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 878 state to active [rebalance:info,2014-08-19T16:51:24.099,ns_1@10.242.238.88:<0.21593.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:51:24.100,ns_1@10.242.238.88:<0.18566.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.100,ns_1@10.242.238.88:<0.19661.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 878 on ns_1@10.242.238.88 [rebalance:debug,2014-08-19T16:51:24.101,ns_1@10.242.238.88:<0.21593.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.102,ns_1@10.242.238.88:<0.19661.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.102,ns_1@10.242.238.88:<0.21593.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.102,ns_1@10.242.238.88:<0.18706.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 635 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:info,2014-08-19T16:51:24.103,ns_1@10.242.238.88:<0.18574.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_637_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.103,ns_1@10.242.238.88:<0.18566.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:debug,2014-08-19T16:51:24.104,ns_1@10.242.238.88:<0.18714.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.106,ns_1@10.242.238.88:<0.18566.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 637 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21611.1> [ns_server:info,2014-08-19T16:51:24.107,ns_1@10.242.238.88:<0.21611.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 637 to state replica [ns_server:info,2014-08-19T16:51:24.108,ns_1@10.242.238.88:<0.18714.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_635_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.108,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 635 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:24.108,ns_1@10.242.238.88:<0.21615.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 635 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:24.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 6. Nacking mccouch update. [views:debug,2014-08-19T16:51:24.112,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/6. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.112,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",6,active,0} [ns_server:debug,2014-08-19T16:51:24.114,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 818,635,324,141,13,869,64,558,375,192,792,609,298,243,115,843,532,38,349,166, 894,89,583,272,217,817,634,323,140,12,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190, 790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815,632,321,138,10, 866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214, 9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528,345,34,162, 890,85,579,268,213,813,8,630,319,136,864,59,553,370,187,787,604,293,238,110, 838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552,369,186, 786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628,6,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210, 810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341,30,158, 886,81,575,264,209,809,626,315,132,860,55,549,366,183,783,600,289,234,106, 834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365,182, 782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313,130, 858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261,206, 806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26,154, 882,77,571,260,205] [ns_server:debug,2014-08-19T16:51:24.127,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.128,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:24.128,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.129,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.129,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{635, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.130,ns_1@10.242.238.88:<0.21611.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_637 [rebalance:info,2014-08-19T16:51:24.131,ns_1@10.242.238.88:<0.21611.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[637]}, {checkpoints,[{637,1}]}, {name,<<"rebalance_637">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[637]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"637"}]} [rebalance:debug,2014-08-19T16:51:24.132,ns_1@10.242.238.88:<0.21611.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21624.1> [rebalance:info,2014-08-19T16:51:24.132,ns_1@10.242.238.88:<0.19507.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 880 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.132,ns_1@10.242.238.88:<0.21625.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 639) [rebalance:info,2014-08-19T16:51:24.133,ns_1@10.242.238.88:<0.21611.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:24.133,ns_1@10.242.238.88:<0.19215.0>:ns_memcached:do_handle_call:527]Changed vbucket 880 state to active [rebalance:info,2014-08-19T16:51:24.134,ns_1@10.242.238.88:<0.19507.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 880 on ns_1@10.242.238.88 [rebalance:debug,2014-08-19T16:51:24.134,ns_1@10.242.238.88:<0.21611.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.134,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 635 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:24.134,ns_1@10.242.238.88:<0.18390.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.134,ns_1@10.242.238.88:<0.21611.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.135,ns_1@10.242.238.88:<0.19507.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:24.135,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 635) [ns_server:debug,2014-08-19T16:51:24.136,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.136,ns_1@10.242.238.88:<0.18566.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 637 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:24.137,ns_1@10.242.238.88:<0.18574.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.138,ns_1@10.242.238.88:<0.18398.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_639_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.138,ns_1@10.242.238.88:<0.18390.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:24.141,ns_1@10.242.238.88:<0.18390.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 639 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21633.1> [ns_server:info,2014-08-19T16:51:24.141,ns_1@10.242.238.88:<0.18574.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_637_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.141,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 637 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [ns_server:info,2014-08-19T16:51:24.142,ns_1@10.242.238.88:<0.21633.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 639 to state replica [rebalance:info,2014-08-19T16:51:24.142,ns_1@10.242.238.88:<0.21637.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 637 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [views:debug,2014-08-19T16:51:24.146,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/6. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.146,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",6,active,0} [ns_server:debug,2014-08-19T16:51:24.162,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.163,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:24.163,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.163,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.164,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{637, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.164,ns_1@10.242.238.88:<0.21633.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_639 [rebalance:info,2014-08-19T16:51:24.166,ns_1@10.242.238.88:<0.21633.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[639]}, {checkpoints,[{639,1}]}, {name,<<"rebalance_639">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[639]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"639"}]} [rebalance:info,2014-08-19T16:51:24.166,ns_1@10.242.238.88:<0.19367.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 882 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.166,ns_1@10.242.238.88:<0.19960.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 875 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:debug,2014-08-19T16:51:24.166,ns_1@10.242.238.88:<0.21633.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21645.1> [ns_server:info,2014-08-19T16:51:24.166,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 882 state to active [rebalance:info,2014-08-19T16:51:24.167,ns_1@10.242.238.88:<0.21633.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:51:24.167,ns_1@10.242.238.88:<0.19367.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 882 on ns_1@10.242.238.88 [rebalance:debug,2014-08-19T16:51:24.168,ns_1@10.242.238.88:<0.21633.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:51:24.168,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 875 state to active [rebalance:info,2014-08-19T16:51:24.168,ns_1@10.242.238.88:<0.21633.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.169,ns_1@10.242.238.88:<0.19960.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 875 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.170,ns_1@10.242.238.88:<0.19367.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.170,ns_1@10.242.238.88:<0.19960.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas 
[rebalance:info,2014-08-19T16:51:24.172,ns_1@10.242.238.88:<0.18390.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 639 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:24.173,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 637 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.173,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 637) [ns_server:debug,2014-08-19T16:51:24.174,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:51:24.174,ns_1@10.242.238.88:<0.18398.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.178,ns_1@10.242.238.88:<0.18398.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_639_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.178,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 639 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:24.178,ns_1@10.242.238.88:<0.21659.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 639 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:debug,2014-08-19T16:51:24.201,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.201,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.202,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.202,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{639, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.202,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:24.208,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 639 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.208,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 639) [ns_server:debug,2014-08-19T16:51:24.209,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.233,ns_1@10.242.238.88:<0.19213.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 884 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.233,ns_1@10.242.238.88:<0.19734.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 877 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:24.233,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 884 state to active [rebalance:info,2014-08-19T16:51:24.234,ns_1@10.242.238.88:<0.19213.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 884 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:24.235,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 877 state to active [rebalance:info,2014-08-19T16:51:24.235,ns_1@10.242.238.88:<0.19734.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 877 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.236,ns_1@10.242.238.88:<0.19213.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.236,ns_1@10.242.238.88:<0.19734.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:24.246,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 4. Nacking mccouch update. [views:debug,2014-08-19T16:51:24.246,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/4. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.246,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",4,active,0} [ns_server:debug,2014-08-19T16:51:24.248,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 818,635,324,141,13,869,64,558,375,192,792,609,298,243,115,843,532,38,349,166, 894,89,583,272,217,817,634,323,140,12,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190, 790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815,632,321,138,10, 866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214, 9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528,345,34,162, 890,85,579,268,213,813,8,630,319,136,864,59,553,370,187,787,604,293,238,110, 838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552,369,186, 786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628,6,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210, 810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341,30,158, 886,81,575,264,209,809,626,4,315,132,860,55,549,366,183,783,600,289,234,106, 834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365,182, 782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313,130, 858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261,206, 806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26,154, 882,77,571,260,205] [views:debug,2014-08-19T16:51:24.280,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/4. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.280,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",4,active,0} [rebalance:info,2014-08-19T16:51:24.300,ns_1@10.242.238.88:<0.19584.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 879 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.300,ns_1@10.242.238.88:<0.19047.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 886 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:24.301,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 879 state to active [rebalance:info,2014-08-19T16:51:24.302,ns_1@10.242.238.88:<0.19584.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 879 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:24.302,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 886 state to active [rebalance:info,2014-08-19T16:51:24.303,ns_1@10.242.238.88:<0.19047.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 886 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.303,ns_1@10.242.238.88:<0.19584.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.303,ns_1@10.242.238.88:<0.19047.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:debug,2014-08-19T16:51:24.355,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 2. Nacking mccouch update. [views:debug,2014-08-19T16:51:24.355,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/2. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.355,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",2,active,0} [ns_server:debug,2014-08-19T16:51:24.357,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 818,635,324,141,13,869,64,558,375,192,792,609,298,243,115,843,532,38,349,166, 894,89,583,272,217,817,634,323,140,12,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190, 790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815,632,321,138,10, 866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214, 9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528,345,34,162, 890,85,579,268,213,813,8,630,319,136,864,59,553,370,187,787,604,293,238,110, 838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552,369,186, 786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628,6,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210, 810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341,30,158, 886,81,575,264,209,809,626,4,315,132,860,55,549,366,183,783,600,289,234,106, 834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365,182, 782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313,2,130, 858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261,206, 806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26,154, 882,77,571,260,205] [rebalance:info,2014-08-19T16:51:24.367,ns_1@10.242.238.88:<0.18902.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 888 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.367,ns_1@10.242.238.88:<0.19444.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 881 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:24.367,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 888 state to active [rebalance:info,2014-08-19T16:51:24.368,ns_1@10.242.238.88:<0.18902.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 
888 on ns_1@10.242.238.88 [ns_server:info,2014-08-19T16:51:24.369,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 881 state to active [rebalance:info,2014-08-19T16:51:24.369,ns_1@10.242.238.88:<0.19444.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 881 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.370,ns_1@10.242.238.88:<0.18902.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.370,ns_1@10.242.238.88:<0.19444.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [views:debug,2014-08-19T16:51:24.389,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/2. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.389,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",2,active,0} [rebalance:info,2014-08-19T16:51:24.434,ns_1@10.242.238.88:<0.21721.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 890) [rebalance:info,2014-08-19T16:51:24.434,ns_1@10.242.238.88:<0.19290.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 883 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:info,2014-08-19T16:51:24.435,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 883 state to active [rebalance:info,2014-08-19T16:51:24.435,ns_1@10.242.238.88:<0.18748.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.436,ns_1@10.242.238.88:<0.19290.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 883 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.436,ns_1@10.242.238.88:<0.19290.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [ns_server:info,2014-08-19T16:51:24.439,ns_1@10.242.238.88:<0.18756.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_890_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.439,ns_1@10.242.238.88:<0.18748.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:24.441,ns_1@10.242.238.88:<0.18748.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 890 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21728.1> [ns_server:info,2014-08-19T16:51:24.442,ns_1@10.242.238.88:<0.21728.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 890 to state replica [ns_server:debug,2014-08-19T16:51:24.465,ns_1@10.242.238.88:<0.21728.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_890 [rebalance:info,2014-08-19T16:51:24.466,ns_1@10.242.238.88:<0.21728.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[890]}, {checkpoints,[{890,1}]}, {name,<<"rebalance_890">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[890]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"890"}]} [rebalance:debug,2014-08-19T16:51:24.467,ns_1@10.242.238.88:<0.21728.1>:ebucketmigrator_srv:init:640]upstream_sender pid: 
<0.21734.1> [rebalance:info,2014-08-19T16:51:24.468,ns_1@10.242.238.88:<0.21735.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 892) [rebalance:info,2014-08-19T16:51:24.468,ns_1@10.242.238.88:<0.19135.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 885 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.468,ns_1@10.242.238.88:<0.21728.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:24.468,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 885 state to active [rebalance:info,2014-08-19T16:51:24.469,ns_1@10.242.238.88:<0.18608.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.469,ns_1@10.242.238.88:<0.19135.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 885 on ns_1@10.242.238.88 [rebalance:debug,2014-08-19T16:51:24.470,ns_1@10.242.238.88:<0.21728.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.470,ns_1@10.242.238.88:<0.21728.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.470,ns_1@10.242.238.88:<0.19135.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.471,ns_1@10.242.238.88:<0.18748.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 890 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:info,2014-08-19T16:51:24.472,ns_1@10.242.238.88:<0.18616.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_892_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.472,ns_1@10.242.238.88:<0.18608.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:debug,2014-08-19T16:51:24.473,ns_1@10.242.238.88:<0.18756.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.474,ns_1@10.242.238.88:<0.18608.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 892 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21751.1> [ns_server:info,2014-08-19T16:51:24.475,ns_1@10.242.238.88:<0.21751.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 892 to state replica [ns_server:info,2014-08-19T16:51:24.476,ns_1@10.242.238.88:<0.18756.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_890_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.476,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 890 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:24.476,ns_1@10.242.238.88:<0.21755.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 890 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:24.497,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.498,ns_1@10.242.238.88:<0.21751.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: 
rebalance_892 [ns_server:debug,2014-08-19T16:51:24.498,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{890, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.499,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.499,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.499,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:24.499,ns_1@10.242.238.88:<0.21751.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[892]}, {checkpoints,[{892,1}]}, {name,<<"rebalance_892">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[892]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"892"}]} [rebalance:debug,2014-08-19T16:51:24.500,ns_1@10.242.238.88:<0.21751.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21759.1> [rebalance:info,2014-08-19T16:51:24.501,ns_1@10.242.238.88:<0.21751.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:51:24.502,ns_1@10.242.238.88:<0.18970.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 887 state change: {'ns_1@10.242.238.88',active,paused,undefined} [rebalance:info,2014-08-19T16:51:24.502,ns_1@10.242.238.88:<0.21765.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 894) [rebalance:debug,2014-08-19T16:51:24.503,ns_1@10.242.238.88:<0.21751.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:51:24.503,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 887 state to active [rebalance:info,2014-08-19T16:51:24.503,ns_1@10.242.238.88:<0.21751.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.504,ns_1@10.242.238.88:<0.18970.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 887 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.504,ns_1@10.242.238.88:<0.18970.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.506,ns_1@10.242.238.88:<0.18608.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 892 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:24.506,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 890 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:24.506,ns_1@10.242.238.88:<0.18468.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:51:24.506,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 890) [ns_server:debug,2014-08-19T16:51:24.507,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:51:24.507,ns_1@10.242.238.88:<0.18616.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.509,ns_1@10.242.238.88:<0.18476.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_894_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.509,ns_1@10.242.238.88:<0.18468.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.510,ns_1@10.242.238.88:<0.18616.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_892_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.510,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 892 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:24.511,ns_1@10.242.238.88:<0.21776.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 892 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:24.512,ns_1@10.242.238.88:<0.18468.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 894 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21777.1> [ns_server:info,2014-08-19T16:51:24.513,ns_1@10.242.238.88:<0.21777.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 894 to state replica [ns_server:debug,2014-08-19T16:51:24.531,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.532,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:24.532,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.532,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{892, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:24.535,ns_1@10.242.238.88:<0.18825.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 889 state change: {'ns_1@10.242.238.88',active,paused,undefined} [ns_server:debug,2014-08-19T16:51:24.536,ns_1@10.242.238.88:<0.21777.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_894 [ns_server:info,2014-08-19T16:51:24.536,ns_1@10.242.238.88:<0.19216.0>:ns_memcached:do_handle_call:527]Changed vbucket 889 state to active [rebalance:info,2014-08-19T16:51:24.537,ns_1@10.242.238.88:<0.18825.1>:janitor_agent:get_replication_persistence_checkpoint_id:569]default: Doing get_replication_persistence_checkpoint_id call for vbucket 889 on ns_1@10.242.238.88 [rebalance:info,2014-08-19T16:51:24.537,ns_1@10.242.238.88:<0.18825.1>:ns_single_vbucket_mover:mover_inner:253]Will wait for checkpoint 1 on replicas [rebalance:info,2014-08-19T16:51:24.538,ns_1@10.242.238.88:<0.21777.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[894]}, {checkpoints,[{894,1}]}, {name,<<"rebalance_894">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[894]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"894"}]} [rebalance:info,2014-08-19T16:51:24.538,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 892 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.539,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 892) [rebalance:debug,2014-08-19T16:51:24.539,ns_1@10.242.238.88:<0.21777.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.21791.1> [ns_server:debug,2014-08-19T16:51:24.539,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.540,ns_1@10.242.238.88:<0.21777.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.541,ns_1@10.242.238.88:<0.21777.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.542,ns_1@10.242.238.88:<0.21777.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.543,ns_1@10.242.238.88:<0.18468.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 894 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:24.545,ns_1@10.242.238.88:<0.18476.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.548,ns_1@10.242.238.88:<0.18476.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_894_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.548,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 894 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:24.548,ns_1@10.242.238.88:<0.21795.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 894 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [ns_server:debug,2014-08-19T16:51:24.556,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:110]Added _local/vbuuid document into vb: 0. Nacking mccouch update. [views:debug,2014-08-19T16:51:24.556,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/0. 
Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.556,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",0,active,0} [ns_server:debug,2014-08-19T16:51:24.558,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:387]Usable vbuckets: [805,622,311,128,0,856,545,51,362,179,779,596,285,230,102,830,519,336,25,153, 881,76,570,259,204,804,621,310,255,127,855,544,50,361,178,778,595,284,229, 101,829,518,335,24,152,880,75,569,258,203,803,620,309,254,126,854,543,49,360, 177,777,594,283,228,100,828,517,334,23,151,879,74,568,257,202,802,619,308, 253,125,853,542,48,359,176,99,776,593,282,227,827,516,333,22,150,878,73,567, 256,201,801,618,307,252,124,852,541,47,358,175,98,775,592,281,226,826,515, 332,21,149,877,72,566,383,200,800,617,306,251,123,851,540,46,357,174,97,774, 591,280,225,825,514,331,20,148,876,71,565,382,199,799,616,305,250,122,850, 539,45,356,173,96,773,590,279,224,824,513,330,19,147,875,70,564,381,198,798, 615,304,249,121,849,538,44,355,172,95,772,589,278,223,823,512,329,18,146,874, 69,563,380,197,797,614,303,248,120,848,537,43,354,171,94,771,588,277,222,822, 639,328,17,145,873,68,562,379,196,796,613,302,247,119,847,536,42,353,170,93, 770,587,276,221,821,638,327,16,144,872,67,561,378,195,795,612,301,246,118, 846,535,41,352,169,92,769,586,275,220,820,637,326,15,143,871,66,560,377,194, 794,611,300,245,117,845,534,40,351,168,91,768,585,274,219,819,636,325,142,14, 870,65,559,376,193,793,610,299,244,116,844,533,39,350,167,90,895,584,273,218, 818,635,324,141,13,869,64,558,375,192,792,609,298,243,115,843,532,38,349,166, 894,89,583,272,217,817,634,323,140,12,868,63,557,374,191,791,608,297,242,114, 842,531,37,348,165,893,88,582,271,216,816,633,322,139,11,867,62,556,373,190, 790,607,296,241,113,841,530,36,347,164,892,87,581,270,215,815,632,321,138,10, 866,61,555,372,189,789,606,295,240,112,840,529,35,346,163,891,86,580,269,214, 9,814,631,320,137,865,60,554,371,188,788,605,294,239,111,839,528,345,34,162, 890,85,579,268,213,813,8,630,319,136,864,59,553,370,187,787,604,293,238,110, 838,527,344,33,161,889,84,578,267,212,812,7,629,318,135,863,58,552,369,186, 786,603,292,237,109,837,526,343,32,160,888,83,577,266,211,811,628,6,317,134, 862,57,551,368,185,785,602,291,236,108,836,525,342,31,159,887,82,576,265,210, 810,627,5,316,133,861,56,550,367,184,784,601,290,235,107,835,524,341,30,158, 886,81,575,264,209,809,626,4,315,132,860,55,549,366,183,783,600,289,234,106, 834,523,340,29,157,885,80,574,263,208,808,625,314,3,131,859,548,54,365,182, 782,599,288,233,105,833,522,339,28,156,884,79,573,262,207,807,624,313,2,130, 858,547,53,364,181,781,598,287,232,104,832,521,338,27,155,883,78,572,261,206, 806,623,312,129,1,857,546,52,363,180,780,597,286,231,103,831,520,337,26,154, 882,77,571,260,205] [ns_server:debug,2014-08-19T16:51:24.570,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.571,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.572,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{894, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, 
{replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.572,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.572,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:24.583,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 894 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.583,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 894) [ns_server:debug,2014-08-19T16:51:24.584,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.599,ns_1@10.242.238.88:<0.21805.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 891) [rebalance:info,2014-08-19T16:51:24.599,ns_1@10.242.238.88:<0.21806.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 895) [rebalance:info,2014-08-19T16:51:24.600,ns_1@10.242.238.88:<0.21807.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 893) [rebalance:info,2014-08-19T16:51:24.600,ns_1@10.242.238.88:<0.21808.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 364) [rebalance:info,2014-08-19T16:51:24.600,ns_1@10.242.238.88:<0.21809.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 366) [rebalance:info,2014-08-19T16:51:24.600,ns_1@10.242.238.88:<0.21810.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 368) [rebalance:info,2014-08-19T16:51:24.600,ns_1@10.242.238.88:<0.21811.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 372) [rebalance:info,2014-08-19T16:51:24.600,ns_1@10.242.238.88:<0.21812.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 370) [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.18685.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.600,ns_1@10.242.238.88:<0.21813.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 376) [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.21814.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 374) [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.18426.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.18545.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.21815.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 363) [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.21816.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 365) [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.21817.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 378) [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.21818.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 367) [rebalance:info,2014-08-19T16:51:24.601,ns_1@10.242.238.88:<0.21819.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 620) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21821.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 622) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21820.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 369) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21822.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 624) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21824.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 371) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21823.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 626) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21825.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 628) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21826.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 373) [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21827.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 630) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.19855.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.602,ns_1@10.242.238.88:<0.21828.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 375) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21829.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 377) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21830.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.89 (vbucket 379) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.19682.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21831.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 632) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21832.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 874) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.19542.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21833.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 619) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.19388.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21834.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 876) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.19248.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21835.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 621) [rebalance:info,2014-08-19T16:51:24.603,ns_1@10.242.238.88:<0.21836.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 878) [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.21837.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 623) [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.21838.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 880) [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.19090.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.20060.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.21840.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 625) [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.19826.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.21841.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 882) [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.21843.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 627) [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.21844.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 884) [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.19661.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.18923.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.21845.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 631) [ns_server:info,2014-08-19T16:51:24.604,ns_1@10.242.238.88:<0.18693.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_891_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21848.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 629) [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.20003.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21850.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 888) [ns_server:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.18553.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_893_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.18685.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21851.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 886) [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.19765.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21852.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.90 (vbucket 633) [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.18545.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.19507.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.18434.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_895_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21854.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 877) [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.19605.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21855.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 875) [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.18426.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21857.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 881) [rebalance:info,2014-08-19T16:51:24.605,ns_1@10.242.238.88:<0.21856.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 879) [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.21858.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 887) [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19465.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.21859.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 883) [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19005.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19367.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.21860.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 885) [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19883.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19325.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19161.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19563.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19213.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.18846.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.90' [rebalance:info,2014-08-19T16:51:24.606,ns_1@10.242.238.88:<0.19704.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.18902.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19269.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19409.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.18944.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19047.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19734.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19960.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.20032.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19111.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19444.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.607,ns_1@10.242.238.88:<0.19584.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19796.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.18804.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19863.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_620_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.18970.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19640.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19855.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19290.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19191.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19486.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.608,ns_1@10.242.238.88:<0.19135.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [rebalance:info,2014-08-19T16:51:24.609,ns_1@10.242.238.88:<0.19346.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:51:24.609,ns_1@10.242.238.88:<0.19550.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_624_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.609,ns_1@10.242.238.88:<0.18881.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.609,ns_1@10.242.238.88:<0.19542.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.609,ns_1@10.242.238.88:<0.19690.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_622_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.609,ns_1@10.242.238.88:<0.18727.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.89' [rebalance:info,2014-08-19T16:51:24.610,ns_1@10.242.238.88:<0.19682.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.610,ns_1@10.242.238.88:<0.19026.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. 
Will shutdown replicator into 'ns_1@10.242.238.89' [ns_server:info,2014-08-19T16:51:24.612,ns_1@10.242.238.88:<0.19396.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_626_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.612,ns_1@10.242.238.88:<0.19388.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.613,ns_1@10.242.238.88:<0.19256.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_628_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.613,ns_1@10.242.238.88:<0.19248.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.616,ns_1@10.242.238.88:<0.19098.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_630_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.616,ns_1@10.242.238.88:<0.19090.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.621,ns_1@10.242.238.88:<0.20069.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_874_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.622,ns_1@10.242.238.88:<0.20060.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.623,ns_1@10.242.238.88:<0.18931.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_632_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.624,ns_1@10.242.238.88:<0.18923.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.627,ns_1@10.242.238.88:<0.20011.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_619_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.627,ns_1@10.242.238.88:<0.20003.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.629,ns_1@10.242.238.88:<0.19669.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_878_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.629,ns_1@10.242.238.88:<0.19661.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.630,ns_1@10.242.238.88:<0.19835.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_876_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.630,ns_1@10.242.238.88:<0.19826.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.630,ns_1@10.242.238.88:<0.19773.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_621_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.630,ns_1@10.242.238.88:<0.19765.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.630,ns_1@10.242.238.88:<0.19613.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': 
[<<"replication_building_623_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.630,ns_1@10.242.238.88:<0.19605.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.630,ns_1@10.242.238.88:<0.19515.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_880_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.631,ns_1@10.242.238.88:<0.19507.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.631,ns_1@10.242.238.88:<0.19013.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_631_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:24.631,ns_1@10.242.238.88:<0.19333.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_627_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.632,ns_1@10.242.238.88:<0.19005.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.632,ns_1@10.242.238.88:<0.19325.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.632,ns_1@10.242.238.88:<0.19473.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_625_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.632,ns_1@10.242.238.88:<0.19465.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.632,ns_1@10.242.238.88:<0.19375.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_882_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.632,ns_1@10.242.238.88:<0.19367.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.633,ns_1@10.242.238.88:<0.19172.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_629_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.633,ns_1@10.242.238.88:<0.19161.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.633,ns_1@10.242.238.88:<0.18854.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_633_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.633,ns_1@10.242.238.88:<0.18846.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.633,ns_1@10.242.238.88:<0.18910.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_888_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.634,ns_1@10.242.238.88:<0.18902.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:24.634,ns_1@10.242.238.88:<0.18685.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 891 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21873.1> [ns_server:info,2014-08-19T16:51:24.634,ns_1@10.242.238.88:<0.19892.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': 
[<<"replication_building_364_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:51:24.634,ns_1@10.242.238.88:<0.19221.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_884_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:51:24.635,ns_1@10.242.238.88:<0.21873.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 891 to state replica [rebalance:info,2014-08-19T16:51:24.636,ns_1@10.242.238.88:<0.19883.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.636,ns_1@10.242.238.88:<0.19213.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.636,ns_1@10.242.238.88:<0.19571.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_368_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.637,ns_1@10.242.238.88:<0.19563.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.637,ns_1@10.242.238.88:<0.19417.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_370_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.637,ns_1@10.242.238.88:<0.19409.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.637,ns_1@10.242.238.88:<0.19713.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_366_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.637,ns_1@10.242.238.88:<0.19704.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.638,ns_1@10.242.238.88:<0.19743.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_877_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.638,ns_1@10.242.238.88:<0.19734.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.638,ns_1@10.242.238.88:<0.19277.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_372_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.638,ns_1@10.242.238.88:<0.19269.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.639,ns_1@10.242.238.88:<0.19055.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_886_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.639,ns_1@10.242.238.88:<0.19047.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.639,ns_1@10.242.238.88:<0.19452.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_881_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.639,ns_1@10.242.238.88:<0.19444.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.639,ns_1@10.242.238.88:<0.18952.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_376_'ns_1@10.242.238.89'">>] 
[ns_server:debug,2014-08-19T16:51:24.639,ns_1@10.242.238.88:<0.18545.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 893 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21874.1> [ns_server:info,2014-08-19T16:51:24.640,ns_1@10.242.238.88:<0.19592.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_879_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.640,ns_1@10.242.238.88:<0.18944.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.640,ns_1@10.242.238.88:<0.19584.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.640,ns_1@10.242.238.88:<0.21874.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 893 to state replica [views:debug,2014-08-19T16:51:24.640,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/0. Updated state: active (0) [ns_server:debug,2014-08-19T16:51:24.640,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",0,active,0} [ns_server:info,2014-08-19T16:51:24.641,ns_1@10.242.238.88:<0.19973.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_875_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.641,ns_1@10.242.238.88:<0.19960.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.641,ns_1@10.242.238.88:<0.18978.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_887_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.641,ns_1@10.242.238.88:<0.18970.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.642,ns_1@10.242.238.88:<0.20040.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_363_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.642,ns_1@10.242.238.88:<0.20032.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.642,ns_1@10.242.238.88:<0.19298.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_883_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.642,ns_1@10.242.238.88:<0.19290.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.642,ns_1@10.242.238.88:<0.18812.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_378_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.643,ns_1@10.242.238.88:<0.18804.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.643,ns_1@10.242.238.88:<0.19805.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_365_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.643,ns_1@10.242.238.88:<0.19796.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover 
[ns_server:info,2014-08-19T16:51:24.643,ns_1@10.242.238.88:<0.19143.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_885_'ns_1@10.242.238.91'">>] [rebalance:info,2014-08-19T16:51:24.644,ns_1@10.242.238.88:<0.19135.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.644,ns_1@10.242.238.88:<0.19648.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_367_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.644,ns_1@10.242.238.88:<0.19640.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:24.645,ns_1@10.242.238.88:<0.18426.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 895 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21879.1> [ns_server:info,2014-08-19T16:51:24.645,ns_1@10.242.238.88:<0.21879.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 895 to state replica [rebalance:info,2014-08-19T16:51:24.649,ns_1@10.242.238.88:<0.21955.1>:janitor_agent:wait_index_updated:558]default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 889) [rebalance:info,2014-08-19T16:51:24.650,ns_1@10.242.238.88:<0.18825.1>:ns_single_vbucket_mover:mover_inner:270]Done waiting for index updating. Will shutdown replicator into 'ns_1@10.242.238.91' [ns_server:debug,2014-08-19T16:51:24.653,ns_1@10.242.238.88:<0.19855.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 620 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21919.1> [ns_server:info,2014-08-19T16:51:24.654,ns_1@10.242.238.88:<0.21919.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 620 to state replica [ns_server:debug,2014-08-19T16:51:24.661,ns_1@10.242.238.88:<0.19542.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 624 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21948.1> [ns_server:debug,2014-08-19T16:51:24.661,ns_1@10.242.238.88:<0.19682.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 622 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21952.1> [ns_server:debug,2014-08-19T16:51:24.662,ns_1@10.242.238.88:<0.19388.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 626 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21954.1> [ns_server:info,2014-08-19T16:51:24.662,ns_1@10.242.238.88:<0.21952.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 622 to state replica [ns_server:info,2014-08-19T16:51:24.662,ns_1@10.242.238.88:<0.21948.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 624 to state replica [ns_server:info,2014-08-19T16:51:24.665,ns_1@10.242.238.88:<0.21954.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 626 to state replica [ns_server:debug,2014-08-19T16:51:24.665,ns_1@10.242.238.88:<0.19248.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 628 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21956.1> [ns_server:info,2014-08-19T16:51:24.667,ns_1@10.242.238.88:<0.19119.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_374_'ns_1@10.242.238.89'">>] 
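The burst of janitor_agent wait_index_updated calls earlier in this window fans out one call per (destination node, vbucket) pair. A minimal sketch, assuming only the message format shown above (helper name invented here), that groups those calls per node so it is easy to see how many movers are blocked on index updates at once:

# Illustrative only -- tally the janitor_agent wait_index_updated calls per
# destination node (helper name and approach invented for this note).
import re
from collections import defaultdict

WAIT_RE = re.compile(r"Doing wait_index_updated call for (\S+) \(vbucket (\d+)\)")

def waits_by_node(log_text):
    """Map each node to the sorted vbuckets it is awaiting index updates for."""
    by_node = defaultdict(list)
    for node, vb in WAIT_RE.findall(log_text):
        by_node[node].append(int(vb))
    return {node: sorted(vbs) for node, vbs in by_node.items()}

sample = "default: Doing wait_index_updated call for ns_1@10.242.238.91 (vbucket 889)"
print(waits_by_node(sample))   # {'ns_1@10.242.238.91': [889]}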
[rebalance:info,2014-08-19T16:51:24.668,ns_1@10.242.238.88:<0.19111.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.668,ns_1@10.242.238.88:<0.19199.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_373_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:51:24.668,ns_1@10.242.238.88:<0.19494.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_369_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.668,ns_1@10.242.238.88:<0.19191.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [rebalance:info,2014-08-19T16:51:24.668,ns_1@10.242.238.88:<0.19486.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.668,ns_1@10.242.238.88:<0.21956.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 628 to state replica [ns_server:info,2014-08-19T16:51:24.668,ns_1@10.242.238.88:<0.19034.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_375_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.19026.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.18889.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_377_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.18881.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.18735.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_379_'ns_1@10.242.238.89'">>] [ns_server:info,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.19354.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_371_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.18727.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:debug,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.19090.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 630 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21957.1> [rebalance:info,2014-08-19T16:51:24.669,ns_1@10.242.238.88:<0.19346.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.670,ns_1@10.242.238.88:<0.21957.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 630 to state replica [ns_server:debug,2014-08-19T16:51:24.681,ns_1@10.242.238.88:<0.20060.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 874 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21958.1> [ns_server:debug,2014-08-19T16:51:24.686,ns_1@10.242.238.88:<0.18923.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 632 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21959.1> [ns_server:debug,2014-08-19T16:51:24.691,ns_1@10.242.238.88:<0.21873.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_891 [ns_server:info,2014-08-19T16:51:24.694,ns_1@10.242.238.88:<0.21958.1>:ebucketmigrator_srv:init:544]Setting 
{"10.242.238.91",11209} vbucket 874 to state replica [ns_server:info,2014-08-19T16:51:24.695,ns_1@10.242.238.88:<0.21959.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 632 to state replica [ns_server:debug,2014-08-19T16:51:24.697,ns_1@10.242.238.88:<0.20003.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 619 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21960.1> [ns_server:debug,2014-08-19T16:51:24.697,ns_1@10.242.238.88:<0.19661.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 878 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21961.1> [ns_server:debug,2014-08-19T16:51:24.697,ns_1@10.242.238.88:<0.19826.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 876 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21962.1> [ns_server:debug,2014-08-19T16:51:24.698,ns_1@10.242.238.88:<0.19507.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 880 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21965.1> [ns_server:debug,2014-08-19T16:51:24.698,ns_1@10.242.238.88:<0.19765.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 621 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21964.1> [ns_server:debug,2014-08-19T16:51:24.700,ns_1@10.242.238.88:<0.19325.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 627 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21967.1> [ns_server:info,2014-08-19T16:51:24.703,ns_1@10.242.238.88:<0.21960.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 619 to state replica [ns_server:debug,2014-08-19T16:51:24.703,ns_1@10.242.238.88:<0.19605.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 623 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21963.1> [ns_server:debug,2014-08-19T16:51:24.703,ns_1@10.242.238.88:<0.19005.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 631 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21966.1> [rebalance:info,2014-08-19T16:51:24.703,ns_1@10.242.238.88:<0.21873.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[891]}, {checkpoints,[{891,1}]}, {name,<<"rebalance_891">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[891]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"891"}]} [ns_server:debug,2014-08-19T16:51:24.703,ns_1@10.242.238.88:<0.19367.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 882 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21971.1> [ns_server:info,2014-08-19T16:51:24.703,ns_1@10.242.238.88:<0.21961.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 878 to state replica [ns_server:debug,2014-08-19T16:51:24.703,ns_1@10.242.238.88:<0.19465.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 625 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21968.1> [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21962.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 876 to state replica [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21964.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 621 to state replica [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21965.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 880 to state replica 
[ns_server:debug,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.19161.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 629 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21974.1> [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21963.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 623 to state replica [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21967.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 627 to state replica [rebalance:debug,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21873.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22000.1> [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21966.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 631 to state replica [ns_server:debug,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.18846.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 633 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.90': <0.21978.1> [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21968.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 625 to state replica [ns_server:info,2014-08-19T16:51:24.704,ns_1@10.242.238.88:<0.21971.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 882 to state replica [ns_server:info,2014-08-19T16:51:24.705,ns_1@10.242.238.88:<0.21974.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 629 to state replica [ns_server:info,2014-08-19T16:51:24.705,ns_1@10.242.238.88:<0.21978.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.90",11209} vbucket 633 to state replica [ns_server:debug,2014-08-19T16:51:24.706,ns_1@10.242.238.88:<0.18902.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 888 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21979.1> [ns_server:debug,2014-08-19T16:51:24.706,ns_1@10.242.238.88:<0.19734.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 877 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21981.1> [ns_server:debug,2014-08-19T16:51:24.706,ns_1@10.242.238.88:<0.19883.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 364 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21982.1> [ns_server:debug,2014-08-19T16:51:24.706,ns_1@10.242.238.88:<0.19213.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 884 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21980.1> [ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.19563.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 368 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21983.1> [ns_server:info,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.21981.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 877 to state replica [ns_server:info,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.21979.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 888 to state replica [ns_server:info,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.21980.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 884 to state replica [ns_server:info,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.21982.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 364 to state replica [ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.21874.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_893 
[ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.19409.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 370 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21984.1> [ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.19704.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 366 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21985.1> [ns_server:debug,2014-08-19T16:51:24.707,ns_1@10.242.238.88:<0.19269.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 372 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21986.1> [ns_server:debug,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.19444.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 881 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21988.1> [ns_server:info,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.21983.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 368 to state replica [ns_server:debug,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.18944.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 376 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21989.1> [ns_server:debug,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.19960.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 875 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21990.1> [ns_server:info,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.21986.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 372 to state replica [ns_server:info,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.21984.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 370 to state replica [ns_server:info,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.21985.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 366 to state replica [ns_server:info,2014-08-19T16:51:24.708,ns_1@10.242.238.88:<0.21988.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 881 to state replica [ns_server:info,2014-08-19T16:51:24.709,ns_1@10.242.238.88:<0.21989.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 376 to state replica [ns_server:debug,2014-08-19T16:51:24.709,ns_1@10.242.238.88:<0.20032.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 363 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21991.1> [ns_server:debug,2014-08-19T16:51:24.709,ns_1@10.242.238.88:<0.19584.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 879 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21992.1> [ns_server:info,2014-08-19T16:51:24.709,ns_1@10.242.238.88:<0.21990.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 875 to state replica [ns_server:debug,2014-08-19T16:51:24.709,ns_1@10.242.238.88:<0.18970.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 887 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21993.1> [ns_server:debug,2014-08-19T16:51:24.709,ns_1@10.242.238.88:<0.19796.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 365 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21994.1> [ns_server:debug,2014-08-19T16:51:24.709,ns_1@10.242.238.88:<0.19290.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 883 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21995.1> 
[ns_server:debug,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.18804.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 378 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21996.1> [ns_server:debug,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.19640.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 367 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.21997.1> [ns_server:debug,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.19047.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 886 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21987.1> [ns_server:debug,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.19135.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 885 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.21998.1> [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.21992.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 879 to state replica [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.21991.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 363 to state replica [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.21995.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 883 to state replica [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.21993.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 887 to state replica [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.21994.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 365 to state replica [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.21996.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 378 to state replica [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.18833.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_889_'ns_1@10.242.238.91'">>] [ns_server:info,2014-08-19T16:51:24.710,ns_1@10.242.238.88:<0.21997.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 367 to state replica [ns_server:debug,2014-08-19T16:51:24.716,ns_1@10.242.238.88:<0.21879.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_895 [rebalance:info,2014-08-19T16:51:24.720,ns_1@10.242.238.88:<0.21873.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:24.721,ns_1@10.242.238.88:<0.21987.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 886 to state replica [ns_server:debug,2014-08-19T16:51:24.721,ns_1@10.242.238.88:<0.19486.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 369 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.22002.1> [ns_server:debug,2014-08-19T16:51:24.721,ns_1@10.242.238.88:<0.19111.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 374 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.22003.1> [ns_server:info,2014-08-19T16:51:24.721,ns_1@10.242.238.88:<0.21998.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 885 to state replica [ns_server:debug,2014-08-19T16:51:24.722,ns_1@10.242.238.88:<0.18727.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 379 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.22004.1> 
[rebalance:info,2014-08-19T16:51:24.737,ns_1@10.242.238.88:<0.21874.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[893]}, {checkpoints,[{893,1}]}, {name,<<"rebalance_893">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[893]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"893"}]} [ns_server:debug,2014-08-19T16:51:24.737,ns_1@10.242.238.88:<0.19191.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 373 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.22007.1> [ns_server:debug,2014-08-19T16:51:24.737,ns_1@10.242.238.88:<0.19346.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 371 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.22006.1> [rebalance:info,2014-08-19T16:51:24.737,ns_1@10.242.238.88:<0.18825.1>:ns_single_vbucket_mover:mover_inner:272]Going to do takeover [ns_server:info,2014-08-19T16:51:24.738,ns_1@10.242.238.88:<0.22003.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 374 to state replica [ns_server:info,2014-08-19T16:51:24.738,ns_1@10.242.238.88:<0.22002.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 369 to state replica [rebalance:debug,2014-08-19T16:51:24.739,ns_1@10.242.238.88:<0.21874.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22009.1> [ns_server:debug,2014-08-19T16:51:24.741,ns_1@10.242.238.88:<0.18881.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 377 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.22008.1> [ns_server:debug,2014-08-19T16:51:24.741,ns_1@10.242.238.88:<0.19026.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 375 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.89': <0.22005.1> [ns_server:info,2014-08-19T16:51:24.741,ns_1@10.242.238.88:<0.22004.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 379 to state replica [ns_server:info,2014-08-19T16:51:24.742,ns_1@10.242.238.88:<0.22006.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 371 to state replica [ns_server:info,2014-08-19T16:51:24.742,ns_1@10.242.238.88:<0.22007.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 373 to state replica [ns_server:debug,2014-08-19T16:51:24.743,ns_1@10.242.238.88:<0.18825.1>:ns_single_vbucket_mover:spawn_ebucketmigrator_mover:374]Spawned mover "default" 889 'ns_1@10.242.238.88' -> 'ns_1@10.242.238.91': <0.22010.1> [ns_server:debug,2014-08-19T16:51:24.746,ns_1@10.242.238.88:<0.21954.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_626 [rebalance:debug,2014-08-19T16:51:24.747,ns_1@10.242.238.88:<0.21873.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:51:24.747,ns_1@10.242.238.88:<0.22005.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 375 to state replica [ns_server:info,2014-08-19T16:51:24.747,ns_1@10.242.238.88:<0.22008.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.89",11209} vbucket 377 to state replica [rebalance:info,2014-08-19T16:51:24.747,ns_1@10.242.238.88:<0.21873.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.747,ns_1@10.242.238.88:<0.21879.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[895]}, {checkpoints,[{895,1}]}, {name,<<"rebalance_895">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, 
[{username,"default"}, {password,get_from_config}, {vbuckets,[895]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"895"}]} [rebalance:info,2014-08-19T16:51:24.748,ns_1@10.242.238.88:<0.21874.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:24.749,ns_1@10.242.238.88:<0.22010.1>:ebucketmigrator_srv:init:544]Setting {"10.242.238.91",11209} vbucket 889 to state replica [rebalance:debug,2014-08-19T16:51:24.749,ns_1@10.242.238.88:<0.21879.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22011.1> [rebalance:info,2014-08-19T16:51:24.749,ns_1@10.242.238.88:<0.18685.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 891 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:24.749,ns_1@10.242.238.88:<0.21954.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[626]}, {checkpoints,[{626,1}]}, {name,<<"rebalance_626">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[626]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"626"}]} [rebalance:debug,2014-08-19T16:51:24.750,ns_1@10.242.238.88:<0.21874.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.750,ns_1@10.242.238.88:<0.21879.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:51:24.750,ns_1@10.242.238.88:<0.21874.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:51:24.750,ns_1@10.242.238.88:<0.21954.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22012.1> [rebalance:debug,2014-08-19T16:51:24.751,ns_1@10.242.238.88:<0.18693.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:24.752,ns_1@10.242.238.88:<0.21879.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.752,ns_1@10.242.238.88:<0.21879.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.752,ns_1@10.242.238.88:<0.18545.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 893 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.752,ns_1@10.242.238.88:<0.21959.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_632 [rebalance:info,2014-08-19T16:51:24.752,ns_1@10.242.238.88:<0.21954.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:24.752,ns_1@10.242.238.88:<0.21948.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_624 [rebalance:info,2014-08-19T16:51:24.753,ns_1@10.242.238.88:<0.18426.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 895 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:24.755,ns_1@10.242.238.88:<0.18434.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:24.755,ns_1@10.242.238.88:<0.18553.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:24.756,ns_1@10.242.238.88:<0.21954.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.756,ns_1@10.242.238.88:<0.21954.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:51:24.756,ns_1@10.242.238.88:<0.21959.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[632]}, {checkpoints,[{632,1}]}, {name,<<"rebalance_632">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[632]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"632"}]} [rebalance:info,2014-08-19T16:51:24.757,ns_1@10.242.238.88:<0.21948.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[624]}, {checkpoints,[{624,1}]}, {name,<<"rebalance_624">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[624]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"624"}]} [ns_server:info,2014-08-19T16:51:24.757,ns_1@10.242.238.88:<0.18693.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_891_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.757,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 891 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:debug,2014-08-19T16:51:24.757,ns_1@10.242.238.88:<0.21959.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22015.1> [rebalance:debug,2014-08-19T16:51:24.757,ns_1@10.242.238.88:<0.21948.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22018.1> [rebalance:info,2014-08-19T16:51:24.757,ns_1@10.242.238.88:<0.22017.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 891 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:24.757,ns_1@10.242.238.88:<0.19388.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 626 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:24.758,ns_1@10.242.238.88:<0.21959.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:info,2014-08-19T16:51:24.758,ns_1@10.242.238.88:<0.21948.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:24.761,ns_1@10.242.238.88:<0.21957.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_630 [rebalance:debug,2014-08-19T16:51:24.765,ns_1@10.242.238.88:<0.19396.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.765,ns_1@10.242.238.88:<0.18553.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_893_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:24.766,ns_1@10.242.238.88:<0.21948.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.766,ns_1@10.242.238.88:<0.21948.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:51:24.770,ns_1@10.242.238.88:<0.21959.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.771,ns_1@10.242.238.88:<0.21959.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.772,ns_1@10.242.238.88:<0.19542.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 624 state change: {'ns_1@10.242.238.90',active,undefined, undefined} 
[rebalance:info,2014-08-19T16:51:24.772,ns_1@10.242.238.88:<0.21957.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[630]}, {checkpoints,[{630,1}]}, {name,<<"rebalance_630">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[630]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"630"}]} [ns_server:info,2014-08-19T16:51:24.772,ns_1@10.242.238.88:<0.18434.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_895_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.772,ns_1@10.242.238.88:<0.18923.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 632 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:24.772,ns_1@10.242.238.88:<0.21957.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22024.1> [ns_server:info,2014-08-19T16:51:24.772,ns_1@10.242.238.88:<0.19396.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_626_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.773,ns_1@10.242.238.88:<0.21957.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.773,ns_1@10.242.238.88:<0.19550.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:24.774,ns_1@10.242.238.88:<0.21957.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:51:24.775,ns_1@10.242.238.88:<0.21958.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_874 [rebalance:debug,2014-08-19T16:51:24.775,ns_1@10.242.238.88:<0.18931.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:24.775,ns_1@10.242.238.88:<0.21957.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.775,ns_1@10.242.238.88:<0.19090.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 630 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:24.776,ns_1@10.242.238.88:<0.21958.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[874]}, {checkpoints,[{874,1}]}, {name,<<"rebalance_874">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[874]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"874"}]} [rebalance:debug,2014-08-19T16:51:24.776,ns_1@10.242.238.88:<0.21958.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22027.1> [ns_server:info,2014-08-19T16:51:24.776,ns_1@10.242.238.88:<0.19550.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_624_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:24.777,ns_1@10.242.238.88:<0.19098.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:24.777,ns_1@10.242.238.88:<0.21958.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:24.777,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs 
[ns_server:info,2014-08-19T16:51:24.778,ns_1@10.242.238.88:<0.18931.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_632_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:24.778,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.778,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{891, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.779,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.779,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:debug,2014-08-19T16:51:24.781,ns_1@10.242.238.88:<0.21958.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.781,ns_1@10.242.238.88:<0.21958.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.782,ns_1@10.242.238.88:<0.20060.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 874 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:info,2014-08-19T16:51:24.783,ns_1@10.242.238.88:<0.19098.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_630_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:24.785,ns_1@10.242.238.88:<0.20069.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.785,ns_1@10.242.238.88:<0.21956.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_628 [rebalance:info,2014-08-19T16:51:24.787,ns_1@10.242.238.88:<0.21956.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[628]}, {checkpoints,[{628,1}]}, {name,<<"rebalance_628">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[628]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"628"}]} [rebalance:debug,2014-08-19T16:51:24.787,ns_1@10.242.238.88:<0.21956.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22041.1> [rebalance:info,2014-08-19T16:51:24.788,ns_1@10.242.238.88:<0.21956.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.789,ns_1@10.242.238.88:<0.21956.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.790,ns_1@10.242.238.88:<0.21956.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:24.790,ns_1@10.242.238.88:<0.20069.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_874_'ns_1@10.242.238.89'">>] 
[rebalance:info,2014-08-19T16:51:24.790,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 891 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:24.790,ns_1@10.242.238.88:<0.19248.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 628 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.790,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 891) [ns_server:debug,2014-08-19T16:51:24.791,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.791,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 893 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:24.791,ns_1@10.242.238.88:<0.22046.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 893 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:51:24.792,ns_1@10.242.238.88:<0.19256.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.794,ns_1@10.242.238.88:<0.21952.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_622 [ns_server:info,2014-08-19T16:51:24.796,ns_1@10.242.238.88:<0.19256.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_628_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.796,ns_1@10.242.238.88:<0.21952.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[622]}, {checkpoints,[{622,1}]}, {name,<<"rebalance_622">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[622]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"622"}]} [rebalance:debug,2014-08-19T16:51:24.797,ns_1@10.242.238.88:<0.21952.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22049.1> [rebalance:info,2014-08-19T16:51:24.797,ns_1@10.242.238.88:<0.21952.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.799,ns_1@10.242.238.88:<0.21952.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.799,ns_1@10.242.238.88:<0.21952.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.800,ns_1@10.242.238.88:<0.19682.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 622 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:24.801,ns_1@10.242.238.88:<0.19690.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.803,ns_1@10.242.238.88:<0.21919.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_620 [rebalance:info,2014-08-19T16:51:24.805,ns_1@10.242.238.88:<0.21919.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[620]}, {checkpoints,[{620,1}]}, {name,<<"rebalance_620">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[620]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"620"}]} [ns_server:info,2014-08-19T16:51:24.805,ns_1@10.242.238.88:<0.19690.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 
'ns_1@10.242.238.88': [<<"replication_building_622_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:24.805,ns_1@10.242.238.88:<0.21919.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22052.1> [rebalance:info,2014-08-19T16:51:24.806,ns_1@10.242.238.88:<0.21919.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.808,ns_1@10.242.238.88:<0.21919.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.809,ns_1@10.242.238.88:<0.21919.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.810,ns_1@10.242.238.88:<0.19855.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 620 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.811,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:24.811,ns_1@10.242.238.88:<0.19863.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.812,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.812,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.812,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{893, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.816,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:24.821,ns_1@10.242.238.88:<0.19863.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_620_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:24.821,ns_1@10.242.238.88:<0.21992.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_879 [views:debug,2014-08-19T16:51:24.824,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/891. 
Updated state: dead (0) [ns_server:debug,2014-08-19T16:51:24.824,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",891,dead,0} [rebalance:info,2014-08-19T16:51:24.824,ns_1@10.242.238.88:<0.21992.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[879]}, {checkpoints,[{879,1}]}, {name,<<"rebalance_879">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[879]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"879"}]} [rebalance:debug,2014-08-19T16:51:24.825,ns_1@10.242.238.88:<0.21992.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22063.1> [rebalance:info,2014-08-19T16:51:24.825,ns_1@10.242.238.88:<0.21992.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.827,ns_1@10.242.238.88:<0.21992.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.827,ns_1@10.242.238.88:<0.21992.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.828,ns_1@10.242.238.88:<0.19584.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 879 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.830,ns_1@10.242.238.88:<0.21985.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_366 [rebalance:info,2014-08-19T16:51:24.831,ns_1@10.242.238.88:<0.21985.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[366]}, {checkpoints,[{366,1}]}, {name,<<"rebalance_366">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[366]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"366"}]} [rebalance:debug,2014-08-19T16:51:24.832,ns_1@10.242.238.88:<0.19592.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:24.832,ns_1@10.242.238.88:<0.21985.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22064.1> [rebalance:info,2014-08-19T16:51:24.833,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 893 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.834,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 893) [ns_server:debug,2014-08-19T16:51:24.834,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.834,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 895 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:24.835,ns_1@10.242.238.88:<0.22067.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 895 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:24.835,ns_1@10.242.238.88:<0.21985.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:24.836,ns_1@10.242.238.88:<0.19592.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_879_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:24.837,ns_1@10.242.238.88:<0.21985.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.837,ns_1@10.242.238.88:<0.21985.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.838,ns_1@10.242.238.88:<0.19704.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 366 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.842,ns_1@10.242.238.88:<0.21962.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_876 [rebalance:info,2014-08-19T16:51:24.843,ns_1@10.242.238.88:<0.21962.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[876]}, {checkpoints,[{876,1}]}, {name,<<"rebalance_876">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[876]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"876"}]} [rebalance:debug,2014-08-19T16:51:24.844,ns_1@10.242.238.88:<0.21962.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22070.1> [rebalance:info,2014-08-19T16:51:24.845,ns_1@10.242.238.88:<0.21962.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.847,ns_1@10.242.238.88:<0.21962.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.847,ns_1@10.242.238.88:<0.21962.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.848,ns_1@10.242.238.88:<0.19826.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 876 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.850,ns_1@10.242.238.88:<0.22006.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_371 [rebalance:debug,2014-08-19T16:51:24.850,ns_1@10.242.238.88:<0.19835.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:24.851,ns_1@10.242.238.88:<0.22006.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[371]}, {checkpoints,[{371,1}]}, {name,<<"rebalance_371">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[371]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"371"}]} 
[rebalance:debug,2014-08-19T16:51:24.852,ns_1@10.242.238.88:<0.22006.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22071.1> [rebalance:info,2014-08-19T16:51:24.853,ns_1@10.242.238.88:<0.22006.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:24.854,ns_1@10.242.238.88:<0.19835.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_876_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:24.856,ns_1@10.242.238.88:<0.22006.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.856,ns_1@10.242.238.88:<0.22006.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:24.856,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:24.857,ns_1@10.242.238.88:<0.19346.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 371 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.857,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.857,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{895, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:24.858,ns_1@10.242.238.88:<0.19713.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.858,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.858,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:24.859,ns_1@10.242.238.88:<0.19354.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.861,ns_1@10.242.238.88:<0.19713.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_366_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:24.862,ns_1@10.242.238.88:<0.19354.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_371_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:24.864,ns_1@10.242.238.88:<0.21963.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_623 [rebalance:info,2014-08-19T16:51:24.865,ns_1@10.242.238.88:<0.21963.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[623]}, {checkpoints,[{623,1}]}, {name,<<"rebalance_623">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[623]}, {set_to_pending_state,true}, 
{takeover,true}, {suffix,"623"}]} [rebalance:info,2014-08-19T16:51:24.865,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 895 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:51:24.866,ns_1@10.242.238.88:<0.21963.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22087.1> [ns_server:debug,2014-08-19T16:51:24.866,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 895) [rebalance:info,2014-08-19T16:51:24.866,ns_1@10.242.238.88:<0.21963.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:24.867,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.867,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 626 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:24.867,ns_1@10.242.238.88:<0.22089.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 626 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:51:24.868,ns_1@10.242.238.88:<0.21963.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.868,ns_1@10.242.238.88:<0.21963.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.869,ns_1@10.242.238.88:<0.19605.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 623 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:24.870,ns_1@10.242.238.88:<0.19613.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.873,ns_1@10.242.238.88:<0.19613.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_623_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:24.875,ns_1@10.242.238.88:<0.21989.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_376 [rebalance:info,2014-08-19T16:51:24.876,ns_1@10.242.238.88:<0.21989.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[376]}, {checkpoints,[{376,1}]}, {name,<<"rebalance_376">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[376]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"376"}]} [rebalance:debug,2014-08-19T16:51:24.876,ns_1@10.242.238.88:<0.21989.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22092.1> [rebalance:info,2014-08-19T16:51:24.877,ns_1@10.242.238.88:<0.21989.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.878,ns_1@10.242.238.88:<0.21989.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.879,ns_1@10.242.238.88:<0.21989.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.880,ns_1@10.242.238.88:<0.18944.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 376 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.884,ns_1@10.242.238.88:<0.21981.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_877 
[rebalance:info,2014-08-19T16:51:24.886,ns_1@10.242.238.88:<0.21981.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[877]}, {checkpoints,[{877,1}]}, {name,<<"rebalance_877">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[877]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"877"}]} [rebalance:debug,2014-08-19T16:51:24.886,ns_1@10.242.238.88:<0.18952.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:24.886,ns_1@10.242.238.88:<0.21981.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22093.1> [ns_server:debug,2014-08-19T16:51:24.887,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.887,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:24.887,ns_1@10.242.238.88:<0.21981.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:24.888,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.888,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{626, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.888,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:24.890,ns_1@10.242.238.88:<0.21981.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.890,ns_1@10.242.238.88:<0.21981.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:24.891,ns_1@10.242.238.88:<0.18952.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_376_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.891,ns_1@10.242.238.88:<0.19734.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 877 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [views:debug,2014-08-19T16:51:24.892,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/895. Updated state: dead (0) [ns_server:debug,2014-08-19T16:51:24.892,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",895,dead,0} [rebalance:debug,2014-08-19T16:51:24.894,ns_1@10.242.238.88:<0.19743.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:24.895,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 626 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.895,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 626) [ns_server:debug,2014-08-19T16:51:24.896,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.896,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 624 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:24.896,ns_1@10.242.238.88:<0.22106.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 624 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [ns_server:info,2014-08-19T16:51:24.898,ns_1@10.242.238.88:<0.19743.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_877_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:24.900,ns_1@10.242.238.88:<0.21986.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_372 [rebalance:info,2014-08-19T16:51:24.901,ns_1@10.242.238.88:<0.21986.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[372]}, {checkpoints,[{372,1}]}, {name,<<"rebalance_372">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[372]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"372"}]} [rebalance:debug,2014-08-19T16:51:24.901,ns_1@10.242.238.88:<0.21986.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22109.1> [rebalance:info,2014-08-19T16:51:24.902,ns_1@10.242.238.88:<0.21986.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.903,ns_1@10.242.238.88:<0.21986.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.904,ns_1@10.242.238.88:<0.21986.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.904,ns_1@10.242.238.88:<0.19269.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 372 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.908,ns_1@10.242.238.88:<0.21998.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_885 [rebalance:info,2014-08-19T16:51:24.910,ns_1@10.242.238.88:<0.21998.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[885]}, {checkpoints,[{885,1}]}, {name,<<"rebalance_885">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[885]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"885"}]} [rebalance:debug,2014-08-19T16:51:24.911,ns_1@10.242.238.88:<0.21998.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22110.1> [rebalance:info,2014-08-19T16:51:24.912,ns_1@10.242.238.88:<0.21998.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.915,ns_1@10.242.238.88:<0.21998.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.915,ns_1@10.242.238.88:<0.21998.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:51:24.916,ns_1@10.242.238.88:<0.19277.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown 
[ns_server:debug,2014-08-19T16:51:24.916,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:24.916,ns_1@10.242.238.88:<0.19135.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 885 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.917,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.917,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.918,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{624, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.918,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:24.918,ns_1@10.242.238.88:<0.19143.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.919,ns_1@10.242.238.88:<0.21997.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_367 [ns_server:info,2014-08-19T16:51:24.919,ns_1@10.242.238.88:<0.19277.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_372_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:24.922,ns_1@10.242.238.88:<0.21997.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[367]}, {checkpoints,[{367,1}]}, {name,<<"rebalance_367">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[367]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"367"}]} [rebalance:debug,2014-08-19T16:51:24.922,ns_1@10.242.238.88:<0.21997.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22121.1> [rebalance:info,2014-08-19T16:51:24.923,ns_1@10.242.238.88:<0.21997.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:24.924,ns_1@10.242.238.88:<0.19143.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_885_'ns_1@10.242.238.89'">>] [rebalance:debug,2014-08-19T16:51:24.925,ns_1@10.242.238.88:<0.21997.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.925,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 624 done. 
Will delete it on: ['ns_1@10.242.238.88'] [rebalance:info,2014-08-19T16:51:24.925,ns_1@10.242.238.88:<0.21997.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:24.926,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 624) [rebalance:info,2014-08-19T16:51:24.926,ns_1@10.242.238.88:<0.19640.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 367 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.927,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.927,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 632 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:24.927,ns_1@10.242.238.88:<0.22126.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 632 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:51:24.928,ns_1@10.242.238.88:<0.19648.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.931,ns_1@10.242.238.88:<0.19648.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_367_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:24.931,ns_1@10.242.238.88:<0.21991.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_363 [rebalance:info,2014-08-19T16:51:24.933,ns_1@10.242.238.88:<0.21991.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[363]}, {checkpoints,[{363,1}]}, {name,<<"rebalance_363">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[363]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"363"}]} [rebalance:debug,2014-08-19T16:51:24.933,ns_1@10.242.238.88:<0.21991.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22129.1> [rebalance:info,2014-08-19T16:51:24.934,ns_1@10.242.238.88:<0.21991.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.936,ns_1@10.242.238.88:<0.21991.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.936,ns_1@10.242.238.88:<0.21991.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.938,ns_1@10.242.238.88:<0.20032.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 363 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.941,ns_1@10.242.238.88:<0.21961.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_878 [rebalance:info,2014-08-19T16:51:24.943,ns_1@10.242.238.88:<0.21961.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[878]}, {checkpoints,[{878,1}]}, {name,<<"rebalance_878">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[878]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"878"}]} [rebalance:debug,2014-08-19T16:51:24.943,ns_1@10.242.238.88:<0.21961.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22130.1> [rebalance:info,2014-08-19T16:51:24.944,ns_1@10.242.238.88:<0.21961.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:51:24.946,ns_1@10.242.238.88:<0.21961.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.946,ns_1@10.242.238.88:<0.21961.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:debug,2014-08-19T16:51:24.946,ns_1@10.242.238.88:<0.20040.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:24.947,ns_1@10.242.238.88:<0.19661.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 878 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.947,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.948,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.948,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.948,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{632, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:24.948,ns_1@10.242.238.88:<0.19669.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:24.949,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:info,2014-08-19T16:51:24.950,ns_1@10.242.238.88:<0.20040.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_363_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:24.951,ns_1@10.242.238.88:<0.19669.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_878_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:24.953,ns_1@10.242.238.88:<0.21979.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_888 [rebalance:info,2014-08-19T16:51:24.954,ns_1@10.242.238.88:<0.21979.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[888]}, {checkpoints,[{888,1}]}, {name,<<"rebalance_888">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[888]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"888"}]} [rebalance:debug,2014-08-19T16:51:24.955,ns_1@10.242.238.88:<0.21979.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22142.1> [rebalance:info,2014-08-19T16:51:24.956,ns_1@10.242.238.88:<0.21979.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.960,ns_1@10.242.238.88:<0.21979.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:info,2014-08-19T16:51:24.960,ns_1@10.242.238.88:<0.21979.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.960,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 632 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.960,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 632) [rebalance:info,2014-08-19T16:51:24.961,ns_1@10.242.238.88:<0.18902.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 888 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.961,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.961,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 630 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:24.961,ns_1@10.242.238.88:<0.22146.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 630 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:debug,2014-08-19T16:51:24.962,ns_1@10.242.238.88:<0.18910.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.965,ns_1@10.242.238.88:<0.18910.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_888_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:24.967,ns_1@10.242.238.88:<0.21980.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_884 [rebalance:info,2014-08-19T16:51:24.968,ns_1@10.242.238.88:<0.21980.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[884]}, {checkpoints,[{884,1}]}, {name,<<"rebalance_884">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[884]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"884"}]} [rebalance:debug,2014-08-19T16:51:24.969,ns_1@10.242.238.88:<0.21980.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22149.1> [rebalance:info,2014-08-19T16:51:24.970,ns_1@10.242.238.88:<0.21980.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.971,ns_1@10.242.238.88:<0.21980.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.971,ns_1@10.242.238.88:<0.21980.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.972,ns_1@10.242.238.88:<0.19213.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 884 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:24.974,ns_1@10.242.238.88:<0.19221.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [views:debug,2014-08-19T16:51:24.974,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/893. 
Updated state: dead (0) [ns_server:debug,2014-08-19T16:51:24.974,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",893,dead,0} [ns_server:debug,2014-08-19T16:51:24.975,ns_1@10.242.238.88:<0.21978.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_633 [rebalance:info,2014-08-19T16:51:24.976,ns_1@10.242.238.88:<0.21978.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[633]}, {checkpoints,[{633,1}]}, {name,<<"rebalance_633">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[633]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"633"}]} [rebalance:debug,2014-08-19T16:51:24.977,ns_1@10.242.238.88:<0.21978.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22151.1> [ns_server:info,2014-08-19T16:51:24.977,ns_1@10.242.238.88:<0.19221.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_884_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.979,ns_1@10.242.238.88:<0.21978.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.980,ns_1@10.242.238.88:<0.21978.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.980,ns_1@10.242.238.88:<0.21978.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.982,ns_1@10.242.238.88:<0.18846.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 633 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.983,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.984,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:24.984,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:24.984,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{630, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:24.985,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:24.985,ns_1@10.242.238.88:<0.18854.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:24.988,ns_1@10.242.238.88:<0.18854.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_633_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:24.992,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 630 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:24.993,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 630) [ns_server:debug,2014-08-19T16:51:24.993,ns_1@10.242.238.88:<0.22010.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_889 [ns_server:debug,2014-08-19T16:51:24.993,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:24.993,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 874 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:24.993,ns_1@10.242.238.88:<0.22165.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 874 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:24.994,ns_1@10.242.238.88:<0.22010.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[889]}, {checkpoints,[{889,1}]}, {name,<<"rebalance_889">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[889]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"889"}]} [rebalance:debug,2014-08-19T16:51:24.994,ns_1@10.242.238.88:<0.22010.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22166.1> [rebalance:info,2014-08-19T16:51:24.995,ns_1@10.242.238.88:<0.22010.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:24.996,ns_1@10.242.238.88:<0.22010.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:24.997,ns_1@10.242.238.88:<0.22010.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:24.998,ns_1@10.242.238.88:<0.18825.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 889 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:24.998,ns_1@10.242.238.88:<0.22008.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_377 [rebalance:debug,2014-08-19T16:51:24.999,ns_1@10.242.238.88:<0.18833.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:24.999,ns_1@10.242.238.88:<0.22008.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[377]}, {checkpoints,[{377,1}]}, {name,<<"rebalance_377">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[377]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"377"}]} [rebalance:debug,2014-08-19T16:51:25.000,ns_1@10.242.238.88:<0.22008.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22167.1> [rebalance:info,2014-08-19T16:51:25.004,ns_1@10.242.238.88:<0.22008.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.005,ns_1@10.242.238.88:<0.22008.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.006,ns_1@10.242.238.88:<0.22008.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:info,2014-08-19T16:51:25.006,ns_1@10.242.238.88:<0.18833.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_889_'ns_1@10.242.238.89'">>] 
[rebalance:info,2014-08-19T16:51:25.006,ns_1@10.242.238.88:<0.18881.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 377 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.014,ns_1@10.242.238.88:<0.21987.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_886 [ns_server:debug,2014-08-19T16:51:25.015,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.015,ns_1@10.242.238.88:<0.18889.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.015,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [rebalance:info,2014-08-19T16:51:25.016,ns_1@10.242.238.88:<0.21987.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[886]}, {checkpoints,[{886,1}]}, {name,<<"rebalance_886">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[886]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"886"}]} [ns_server:debug,2014-08-19T16:51:25.016,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.016,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{874, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.017,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.017,ns_1@10.242.238.88:<0.21987.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22173.1> [rebalance:info,2014-08-19T16:51:25.018,ns_1@10.242.238.88:<0.21987.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.019,ns_1@10.242.238.88:<0.21987.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:51:25.020,ns_1@10.242.238.88:<0.18889.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_377_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:25.020,ns_1@10.242.238.88:<0.21987.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.020,ns_1@10.242.238.88:<0.19047.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 886 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.022,ns_1@10.242.238.88:<0.22003.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_374 [rebalance:debug,2014-08-19T16:51:25.023,ns_1@10.242.238.88:<0.19055.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:25.024,ns_1@10.242.238.88:<0.22003.1>:ebucketmigrator_srv:init:603]Starting tap 
stream: [{vbuckets,[374]}, {checkpoints,[{374,1}]}, {name,<<"rebalance_374">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[374]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"374"}]} [rebalance:debug,2014-08-19T16:51:25.025,ns_1@10.242.238.88:<0.22003.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22181.1> [rebalance:info,2014-08-19T16:51:25.026,ns_1@10.242.238.88:<0.22003.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:info,2014-08-19T16:51:25.026,ns_1@10.242.238.88:<0.19055.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_886_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:25.027,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 874 done. Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:25.027,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 874) [ns_server:debug,2014-08-19T16:51:25.028,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:debug,2014-08-19T16:51:25.028,ns_1@10.242.238.88:<0.22003.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.028,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 628 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:25.028,ns_1@10.242.238.88:<0.22003.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.028,ns_1@10.242.238.88:<0.22186.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 628 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:51:25.029,ns_1@10.242.238.88:<0.19111.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 374 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [views:debug,2014-08-19T16:51:25.033,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/888. 
Updated state: dead (0) [ns_server:debug,2014-08-19T16:51:25.033,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",888,dead,0} [ns_server:debug,2014-08-19T16:51:25.034,ns_1@10.242.238.88:<0.21993.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_887 [ns_server:debug,2014-08-19T16:51:25.044,ns_1@10.242.238.88:<0.21984.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_370 [rebalance:info,2014-08-19T16:51:25.044,ns_1@10.242.238.88:<0.21993.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[887]}, {checkpoints,[{887,1}]}, {name,<<"rebalance_887">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[887]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"887"}]} [rebalance:debug,2014-08-19T16:51:25.045,ns_1@10.242.238.88:<0.21993.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22187.1> [rebalance:info,2014-08-19T16:51:25.045,ns_1@10.242.238.88:<0.21984.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[370]}, {checkpoints,[{370,1}]}, {name,<<"rebalance_370">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[370]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"370"}]} [rebalance:info,2014-08-19T16:51:25.046,ns_1@10.242.238.88:<0.21993.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.046,ns_1@10.242.238.88:<0.21984.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22188.1> [ns_server:debug,2014-08-19T16:51:25.048,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.049,ns_1@10.242.238.88:<0.21993.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:debug,2014-08-19T16:51:25.049,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.049,ns_1@10.242.238.88:<0.19119.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:25.049,ns_1@10.242.238.88:<0.21993.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.049,ns_1@10.242.238.88:<0.21984.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [ns_server:debug,2014-08-19T16:51:25.049,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:25.049,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.049,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{628, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:info,2014-08-19T16:51:25.050,ns_1@10.242.238.88:<0.18970.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 887 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:25.051,ns_1@10.242.238.88:<0.21984.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.051,ns_1@10.242.238.88:<0.21984.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.052,ns_1@10.242.238.88:<0.19409.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 370 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:info,2014-08-19T16:51:25.052,ns_1@10.242.238.88:<0.19119.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_374_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:51:25.053,ns_1@10.242.238.88:<0.18978.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:debug,2014-08-19T16:51:25.054,ns_1@10.242.238.88:<0.19417.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:25.057,ns_1@10.242.238.88:<0.18978.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_887_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:25.058,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 628 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:25.058,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 628) [ns_server:info,2014-08-19T16:51:25.058,ns_1@10.242.238.88:<0.19417.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_370_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:25.059,ns_1@10.242.238.88:<0.21974.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_629 [ns_server:debug,2014-08-19T16:51:25.059,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:25.060,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 622 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:25.060,ns_1@10.242.238.88:<0.22205.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 622 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:51:25.060,ns_1@10.242.238.88:<0.21974.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[629]}, {checkpoints,[{629,1}]}, {name,<<"rebalance_629">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[629]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"629"}]} [rebalance:debug,2014-08-19T16:51:25.061,ns_1@10.242.238.88:<0.21974.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22206.1> [rebalance:info,2014-08-19T16:51:25.062,ns_1@10.242.238.88:<0.21974.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.064,ns_1@10.242.238.88:<0.21974.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.064,ns_1@10.242.238.88:<0.21974.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.065,ns_1@10.242.238.88:<0.19161.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 629 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:25.067,ns_1@10.242.238.88:<0.19172.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.067,ns_1@10.242.238.88:<0.21990.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_875 [rebalance:info,2014-08-19T16:51:25.069,ns_1@10.242.238.88:<0.21990.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[875]}, {checkpoints,[{875,1}]}, {name,<<"rebalance_875">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[875]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"875"}]} [rebalance:debug,2014-08-19T16:51:25.069,ns_1@10.242.238.88:<0.21990.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22208.1> [ns_server:info,2014-08-19T16:51:25.070,ns_1@10.242.238.88:<0.19172.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_629_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:25.071,ns_1@10.242.238.88:<0.21990.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill 
[rebalance:debug,2014-08-19T16:51:25.073,ns_1@10.242.238.88:<0.21990.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.073,ns_1@10.242.238.88:<0.21990.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.074,ns_1@10.242.238.88:<0.19960.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 875 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:25.076,ns_1@10.242.238.88:<0.19973.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:25.079,ns_1@10.242.238.88:<0.19973.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_875_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:25.079,ns_1@10.242.238.88:<0.21988.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_881 [ns_server:debug,2014-08-19T16:51:25.081,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:25.081,ns_1@10.242.238.88:<0.21988.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[881]}, {checkpoints,[{881,1}]}, {name,<<"rebalance_881">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[881]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"881"}]} [ns_server:debug,2014-08-19T16:51:25.082,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.082,ns_1@10.242.238.88:<0.21988.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22215.1> [ns_server:debug,2014-08-19T16:51:25.083,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{622, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.083,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:25.083,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:25.086,ns_1@10.242.238.88:<0.21988.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.087,ns_1@10.242.238.88:<0.21988.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.088,ns_1@10.242.238.88:<0.21988.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.088,ns_1@10.242.238.88:<0.19444.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 881 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:25.090,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 622 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:51:25.090,ns_1@10.242.238.88:<0.19452.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.091,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 622) [ns_server:debug,2014-08-19T16:51:25.091,ns_1@10.242.238.88:<0.21964.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_621 [ns_server:debug,2014-08-19T16:51:25.091,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [views:debug,2014-08-19T16:51:25.091,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/884. Updated state: dead (0) [ns_server:debug,2014-08-19T16:51:25.091,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",884,dead,0} [rebalance:info,2014-08-19T16:51:25.091,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 620 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}] [rebalance:info,2014-08-19T16:51:25.092,ns_1@10.242.238.88:<0.22223.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 620 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.90'} [rebalance:info,2014-08-19T16:51:25.092,ns_1@10.242.238.88:<0.21964.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[621]}, {checkpoints,[{621,1}]}, {name,<<"rebalance_621">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[621]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"621"}]} [rebalance:debug,2014-08-19T16:51:25.093,ns_1@10.242.238.88:<0.21964.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22224.1> [ns_server:info,2014-08-19T16:51:25.094,ns_1@10.242.238.88:<0.19452.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_881_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:25.094,ns_1@10.242.238.88:<0.21964.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.096,ns_1@10.242.238.88:<0.21964.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.096,ns_1@10.242.238.88:<0.21964.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:51:25.097,ns_1@10.242.238.88:<0.19765.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 621 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:25.099,ns_1@10.242.238.88:<0.19773.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.102,ns_1@10.242.238.88:<0.21995.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_883 [ns_server:info,2014-08-19T16:51:25.103,ns_1@10.242.238.88:<0.19773.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_621_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:25.105,ns_1@10.242.238.88:<0.21995.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[883]}, {checkpoints,[{883,1}]}, {name,<<"rebalance_883">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[883]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"883"}]} [rebalance:debug,2014-08-19T16:51:25.107,ns_1@10.242.238.88:<0.21995.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22229.1> [rebalance:info,2014-08-19T16:51:25.109,ns_1@10.242.238.88:<0.21995.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.111,ns_1@10.242.238.88:<0.21995.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.111,ns_1@10.242.238.88:<0.21995.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.112,ns_1@10.242.238.88:<0.19290.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 883 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.112,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.113,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:25.113,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.114,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.114,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{620, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:25.114,ns_1@10.242.238.88:<0.19298.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.119,ns_1@10.242.238.88:<0.22004.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_379 [ns_server:info,2014-08-19T16:51:25.119,ns_1@10.242.238.88:<0.19298.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_883_'ns_1@10.242.238.89'">>] [rebalance:info,2014-08-19T16:51:25.120,ns_1@10.242.238.88:<0.22004.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[379]}, {checkpoints,[{379,1}]}, {name,<<"rebalance_379">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[379]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"379"}]} [rebalance:info,2014-08-19T16:51:25.120,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 620 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:25.121,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 620) [rebalance:debug,2014-08-19T16:51:25.121,ns_1@10.242.238.88:<0.22004.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22241.1> [ns_server:debug,2014-08-19T16:51:25.121,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:25.121,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 879 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:25.121,ns_1@10.242.238.88:<0.22243.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 879 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:info,2014-08-19T16:51:25.121,ns_1@10.242.238.88:<0.22004.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.123,ns_1@10.242.238.88:<0.22004.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.123,ns_1@10.242.238.88:<0.22004.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.124,ns_1@10.242.238.88:<0.18727.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 379 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.124,ns_1@10.242.238.88:<0.21971.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_882 [views:debug,2014-08-19T16:51:25.125,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/878. 
Updated state: dead (0) [ns_server:debug,2014-08-19T16:51:25.125,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",878,dead,0} [rebalance:info,2014-08-19T16:51:25.126,ns_1@10.242.238.88:<0.21971.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[882]}, {checkpoints,[{882,1}]}, {name,<<"rebalance_882">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[882]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"882"}]} [rebalance:debug,2014-08-19T16:51:25.126,ns_1@10.242.238.88:<0.21971.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22244.1> [rebalance:info,2014-08-19T16:51:25.127,ns_1@10.242.238.88:<0.21971.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.129,ns_1@10.242.238.88:<0.21971.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.129,ns_1@10.242.238.88:<0.21971.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.130,ns_1@10.242.238.88:<0.19367.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 882 state change: {'ns_1@10.242.238.91',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:25.131,ns_1@10.242.238.88:<0.19375.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:25.134,ns_1@10.242.238.88:<0.19375.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_882_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:25.135,ns_1@10.242.238.88:<0.21982.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_364 [rebalance:info,2014-08-19T16:51:25.137,ns_1@10.242.238.88:<0.21982.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[364]}, {checkpoints,[{364,1}]}, {name,<<"rebalance_364">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[364]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"364"}]} [rebalance:debug,2014-08-19T16:51:25.138,ns_1@10.242.238.88:<0.21982.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22247.1> [rebalance:info,2014-08-19T16:51:25.138,ns_1@10.242.238.88:<0.21982.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.140,ns_1@10.242.238.88:<0.21982.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.140,ns_1@10.242.238.88:<0.21982.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [ns_server:debug,2014-08-19T16:51:25.142,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.142,ns_1@10.242.238.88:<0.18735.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [rebalance:info,2014-08-19T16:51:25.142,ns_1@10.242.238.88:<0.19883.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 364 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.143,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[ns_server:debug,2014-08-19T16:51:25.143,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.143,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{879, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [ns_server:debug,2014-08-19T16:51:25.143,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:debug,2014-08-19T16:51:25.144,ns_1@10.242.238.88:<0.19892.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:25.146,ns_1@10.242.238.88:<0.18735.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_379_'ns_1@10.242.238.90'">>] [ns_server:info,2014-08-19T16:51:25.146,ns_1@10.242.238.88:<0.19892.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_364_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:25.148,ns_1@10.242.238.88:<0.21967.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_627 [rebalance:info,2014-08-19T16:51:25.149,ns_1@10.242.238.88:<0.21967.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[627]}, {checkpoints,[{627,1}]}, {name,<<"rebalance_627">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[627]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"627"}]} [rebalance:debug,2014-08-19T16:51:25.150,ns_1@10.242.238.88:<0.21967.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22259.1> [rebalance:info,2014-08-19T16:51:25.151,ns_1@10.242.238.88:<0.21967.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.153,ns_1@10.242.238.88:<0.21967.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.153,ns_1@10.242.238.88:<0.21967.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.153,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 879 done. 
Will delete it on: ['ns_1@10.242.238.88'] [ns_server:debug,2014-08-19T16:51:25.154,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 879) [rebalance:info,2014-08-19T16:51:25.154,ns_1@10.242.238.88:<0.19325.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 627 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.154,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:25.154,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 876 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}] [rebalance:info,2014-08-19T16:51:25.154,ns_1@10.242.238.88:<0.22263.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 876 state change: {'ns_1@10.242.238.89',replica,undefined, 'ns_1@10.242.238.91'} [rebalance:debug,2014-08-19T16:51:25.155,ns_1@10.242.238.88:<0.19333.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:info,2014-08-19T16:51:25.158,ns_1@10.242.238.88:<0.19333.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_627_'ns_1@10.242.238.89'">>] [ns_server:debug,2014-08-19T16:51:25.160,ns_1@10.242.238.88:<0.22002.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_369 [rebalance:info,2014-08-19T16:51:25.162,ns_1@10.242.238.88:<0.22002.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[369]}, {checkpoints,[{369,1}]}, {name,<<"rebalance_369">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[369]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"369"}]} [rebalance:debug,2014-08-19T16:51:25.163,ns_1@10.242.238.88:<0.22002.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22271.1> [rebalance:info,2014-08-19T16:51:25.163,ns_1@10.242.238.88:<0.22002.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.169,ns_1@10.242.238.88:<0.22002.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.169,ns_1@10.242.238.88:<0.22002.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.170,ns_1@10.242.238.88:<0.19486.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 369 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.171,ns_1@10.242.238.88:<0.21996.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_378 [rebalance:info,2014-08-19T16:51:25.172,ns_1@10.242.238.88:<0.21996.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[378]}, {checkpoints,[{378,1}]}, {name,<<"rebalance_378">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[378]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"378"}]} [rebalance:debug,2014-08-19T16:51:25.172,ns_1@10.242.238.88:<0.21996.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22272.1> [rebalance:info,2014-08-19T16:51:25.173,ns_1@10.242.238.88:<0.21996.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.174,ns_1@10.242.238.88:<0.21996.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal 
[rebalance:info,2014-08-19T16:51:25.174,ns_1@10.242.238.88:<0.21996.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.176,ns_1@10.242.238.88:<0.18804.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 378 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:debug,2014-08-19T16:51:25.180,ns_1@10.242.238.88:<0.19494.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.181,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.181,ns_1@10.242.238.88:<0.21983.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_368 [rebalance:debug,2014-08-19T16:51:25.181,ns_1@10.242.238.88:<0.18812.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.181,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) [ns_server:debug,2014-08-19T16:51:25.182,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:25.182,ns_1@10.242.238.88:<0.21983.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[368]}, {checkpoints,[{368,1}]}, {name,<<"rebalance_368">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[368]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"368"}]} [ns_server:debug,2014-08-19T16:51:25.182,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{876, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}] [rebalance:debug,2014-08-19T16:51:25.182,ns_1@10.242.238.88:<0.21983.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22275.1> [ns_server:debug,2014-08-19T16:51:25.182,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [rebalance:info,2014-08-19T16:51:25.183,ns_1@10.242.238.88:<0.21983.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [views:debug,2014-08-19T16:51:25.184,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/876. 
Updated state: dead (0) [ns_server:debug,2014-08-19T16:51:25.184,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",876,dead,0} [ns_server:info,2014-08-19T16:51:25.185,ns_1@10.242.238.88:<0.19494.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_369_'ns_1@10.242.238.90'">>] [rebalance:debug,2014-08-19T16:51:25.186,ns_1@10.242.238.88:<0.21983.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [ns_server:info,2014-08-19T16:51:25.186,ns_1@10.242.238.88:<0.18812.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_378_'ns_1@10.242.238.90'">>] [rebalance:info,2014-08-19T16:51:25.187,ns_1@10.242.238.88:<0.21983.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.188,ns_1@10.242.238.88:<0.19563.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 368 state change: {'ns_1@10.242.238.89',active,undefined, undefined} [rebalance:info,2014-08-19T16:51:25.190,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 876 done. Will delete it on: ['ns_1@10.242.238.88'] [rebalance:debug,2014-08-19T16:51:25.190,ns_1@10.242.238.88:<0.19571.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown [ns_server:debug,2014-08-19T16:51:25.190,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 876) [ns_server:debug,2014-08-19T16:51:25.191,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: [] [rebalance:info,2014-08-19T16:51:25.191,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 366 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}] [rebalance:info,2014-08-19T16:51:25.191,ns_1@10.242.238.88:<0.22288.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 366 state change: {'ns_1@10.242.238.90',replica,undefined, 'ns_1@10.242.238.89'} [ns_server:info,2014-08-19T16:51:25.193,ns_1@10.242.238.88:<0.19571.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_368_'ns_1@10.242.238.90'">>] [ns_server:debug,2014-08-19T16:51:25.193,ns_1@10.242.238.88:<0.21966.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_631 [rebalance:info,2014-08-19T16:51:25.195,ns_1@10.242.238.88:<0.21966.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[631]}, {checkpoints,[{631,1}]}, {name,<<"rebalance_631">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[631]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"631"}]} [rebalance:debug,2014-08-19T16:51:25.195,ns_1@10.242.238.88:<0.21966.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22291.1> [rebalance:info,2014-08-19T16:51:25.196,ns_1@10.242.238.88:<0.21966.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.197,ns_1@10.242.238.88:<0.21966.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.198,ns_1@10.242.238.88:<0.21966.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover 
[rebalance:info,2014-08-19T16:51:25.198,ns_1@10.242.238.88:<0.19005.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 631 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.202,ns_1@10.242.238.88:<0.21960.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_619 [rebalance:info,2014-08-19T16:51:25.203,ns_1@10.242.238.88:<0.21960.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[619]}, {checkpoints,[{619,1}]}, {name,<<"rebalance_619">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[619]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"619"}]} [rebalance:debug,2014-08-19T16:51:25.204,ns_1@10.242.238.88:<0.21960.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22292.1> [rebalance:info,2014-08-19T16:51:25.205,ns_1@10.242.238.88:<0.21960.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill [rebalance:debug,2014-08-19T16:51:25.207,ns_1@10.242.238.88:<0.21960.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal [rebalance:info,2014-08-19T16:51:25.207,ns_1@10.242.238.88:<0.21960.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover [rebalance:info,2014-08-19T16:51:25.208,ns_1@10.242.238.88:<0.20003.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 619 state change: {'ns_1@10.242.238.90',active,undefined, undefined} [ns_server:debug,2014-08-19T16:51:25.213,ns_1@10.242.238.88:<0.22007.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_373 [rebalance:info,2014-08-19T16:51:25.215,ns_1@10.242.238.88:<0.22007.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[373]}, {checkpoints,[{373,1}]}, {name,<<"rebalance_373">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[373]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"373"}]} [ns_server:debug,2014-08-19T16:51:25.216,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs [ns_server:debug,2014-08-19T16:51:25.216,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..) 
[rebalance:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.88:<0.22007.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22295.1>
[rebalance:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.88:<0.19013.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[ns_server:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{366, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}]
[ns_server:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[rebalance:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.88:<0.20011.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[ns_server:debug,2014-08-19T16:51:25.217,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[rebalance:info,2014-08-19T16:51:25.217,ns_1@10.242.238.88:<0.22007.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill
[rebalance:debug,2014-08-19T16:51:25.219,ns_1@10.242.238.88:<0.22007.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal
[rebalance:info,2014-08-19T16:51:25.219,ns_1@10.242.238.88:<0.22007.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover
[rebalance:info,2014-08-19T16:51:25.221,ns_1@10.242.238.88:<0.19191.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 373 state change: {'ns_1@10.242.238.89',active,undefined,undefined}
[ns_server:info,2014-08-19T16:51:25.221,ns_1@10.242.238.88:<0.19013.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_631_'ns_1@10.242.238.89'">>]
[ns_server:info,2014-08-19T16:51:25.221,ns_1@10.242.238.88:<0.20011.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_619_'ns_1@10.242.238.89'">>]
[rebalance:debug,2014-08-19T16:51:25.222,ns_1@10.242.238.88:<0.19199.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[rebalance:info,2014-08-19T16:51:25.227,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 366 done.
Will delete it on: ['ns_1@10.242.238.88']
[ns_server:debug,2014-08-19T16:51:25.228,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 366)
[ns_server:debug,2014-08-19T16:51:25.229,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: []
[ns_server:debug,2014-08-19T16:51:25.229,ns_1@10.242.238.88:<0.21994.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_365
[rebalance:info,2014-08-19T16:51:25.229,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 371 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}]
[rebalance:info,2014-08-19T16:51:25.229,ns_1@10.242.238.88:<0.22308.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 371 state change: {'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}
[ns_server:info,2014-08-19T16:51:25.229,ns_1@10.242.238.88:<0.19199.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_373_'ns_1@10.242.238.90'">>]
[rebalance:info,2014-08-19T16:51:25.230,ns_1@10.242.238.88:<0.21994.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[365]}, {checkpoints,[{365,1}]}, {name,<<"rebalance_365">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[365]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"365"}]}
[rebalance:debug,2014-08-19T16:51:25.231,ns_1@10.242.238.88:<0.21994.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22311.1>
[rebalance:info,2014-08-19T16:51:25.232,ns_1@10.242.238.88:<0.21994.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill
[rebalance:debug,2014-08-19T16:51:25.234,ns_1@10.242.238.88:<0.21994.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal
[rebalance:info,2014-08-19T16:51:25.234,ns_1@10.242.238.88:<0.21994.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover
[rebalance:info,2014-08-19T16:51:25.235,ns_1@10.242.238.88:<0.19796.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 365 state change: {'ns_1@10.242.238.89',active,undefined,undefined}
[rebalance:debug,2014-08-19T16:51:25.236,ns_1@10.242.238.88:<0.19805.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[ns_server:info,2014-08-19T16:51:25.239,ns_1@10.242.238.88:<0.19805.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_365_'ns_1@10.242.238.90'">>]
[ns_server:debug,2014-08-19T16:51:25.241,ns_1@10.242.238.88:<0.21965.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_880
[rebalance:info,2014-08-19T16:51:25.243,ns_1@10.242.238.88:<0.21965.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[880]}, {checkpoints,[{880,1}]}, {name,<<"rebalance_880">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.91",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[880]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"880"}]}
[views:debug,2014-08-19T16:51:25.243,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/874.
Updated state: dead (0)
[ns_server:debug,2014-08-19T16:51:25.243,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",874,dead,0}
[rebalance:debug,2014-08-19T16:51:25.243,ns_1@10.242.238.88:<0.21965.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22314.1>
[rebalance:info,2014-08-19T16:51:25.244,ns_1@10.242.238.88:<0.21965.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill
[rebalance:debug,2014-08-19T16:51:25.246,ns_1@10.242.238.88:<0.21965.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal
[rebalance:info,2014-08-19T16:51:25.246,ns_1@10.242.238.88:<0.21965.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover
[rebalance:info,2014-08-19T16:51:25.248,ns_1@10.242.238.88:<0.19507.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 880 state change: {'ns_1@10.242.238.91',active,undefined,undefined}
[rebalance:debug,2014-08-19T16:51:25.250,ns_1@10.242.238.88:<0.19515.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[ns_server:debug,2014-08-19T16:51:25.250,ns_1@10.242.238.88:<0.21968.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_625
[rebalance:info,2014-08-19T16:51:25.251,ns_1@10.242.238.88:<0.21968.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[625]}, {checkpoints,[{625,1}]}, {name,<<"rebalance_625">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.90",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[625]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"625"}]}
[rebalance:debug,2014-08-19T16:51:25.252,ns_1@10.242.238.88:<0.21968.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22315.1>
[rebalance:info,2014-08-19T16:51:25.252,ns_1@10.242.238.88:<0.21968.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill
[ns_server:info,2014-08-19T16:51:25.253,ns_1@10.242.238.88:<0.19515.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_880_'ns_1@10.242.238.89'">>]
[rebalance:debug,2014-08-19T16:51:25.255,ns_1@10.242.238.88:<0.21968.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal
[rebalance:info,2014-08-19T16:51:25.255,ns_1@10.242.238.88:<0.21968.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover
[rebalance:info,2014-08-19T16:51:25.256,ns_1@10.242.238.88:<0.19465.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 625 state change: {'ns_1@10.242.238.90',active,undefined,undefined}
[ns_server:debug,2014-08-19T16:51:25.257,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[rebalance:debug,2014-08-19T16:51:25.257,ns_1@10.242.238.88:<0.19473.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[ns_server:debug,2014-08-19T16:51:25.258,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:debug,2014-08-19T16:51:25.258,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.258,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{371, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}]
[ns_server:debug,2014-08-19T16:51:25.258,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.259,ns_1@10.242.238.88:<0.22005.1>:ebucketmigrator_srv:kill_tapname:1090]killing tap named: rebalance_375
[rebalance:info,2014-08-19T16:51:25.260,ns_1@10.242.238.88:<0.22005.1>:ebucketmigrator_srv:init:603]Starting tap stream: [{vbuckets,[375]}, {checkpoints,[{375,1}]}, {name,<<"rebalance_375">>}, {takeover,true}] {{"10.242.238.88",11209}, {"10.242.238.89",11209}, [{username,"default"}, {password,get_from_config}, {vbuckets,[375]}, {set_to_pending_state,true}, {takeover,true}, {suffix,"375"}]}
[rebalance:debug,2014-08-19T16:51:25.260,ns_1@10.242.238.88:<0.22005.1>:ebucketmigrator_srv:init:640]upstream_sender pid: <0.22327.1>
[ns_server:info,2014-08-19T16:51:25.260,ns_1@10.242.238.88:<0.19473.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_625_'ns_1@10.242.238.89'">>]
[rebalance:info,2014-08-19T16:51:25.261,ns_1@10.242.238.88:<0.22005.1>:ebucketmigrator_srv:process_upstream:1049]TAP stream is not doing backfill
[rebalance:debug,2014-08-19T16:51:25.263,ns_1@10.242.238.88:<0.22005.1>:ebucketmigrator_srv:terminate:737]Dying with reason: normal
[rebalance:info,2014-08-19T16:51:25.264,ns_1@10.242.238.88:<0.22005.1>:ebucketmigrator_srv:terminate:743]Skipping close ack for successfull takover
[rebalance:info,2014-08-19T16:51:25.264,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 371 done.
Will delete it on: ['ns_1@10.242.238.88']
[ns_server:debug,2014-08-19T16:51:25.265,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 371)
[ns_server:debug,2014-08-19T16:51:25.266,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: []
[rebalance:info,2014-08-19T16:51:25.266,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 623 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}]
[rebalance:info,2014-08-19T16:51:25.266,ns_1@10.242.238.88:<0.19026.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 375 state change: {'ns_1@10.242.238.89',active,undefined,undefined}
[rebalance:info,2014-08-19T16:51:25.266,ns_1@10.242.238.88:<0.22331.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 623 state change: {'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.90'}
[rebalance:debug,2014-08-19T16:51:25.268,ns_1@10.242.238.88:<0.19034.1>:new_ns_replicas_builder:terminate:110]Dying with reason: shutdown
[ns_server:info,2014-08-19T16:51:25.270,ns_1@10.242.238.88:<0.19034.1>:ns_replicas_builder_utils:kill_a_bunch_of_tap_names:59]Killed the following tap names on 'ns_1@10.242.238.88': [<<"replication_building_375_'ns_1@10.242.238.90'">>]
[ns_server:debug,2014-08-19T16:51:25.293,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.294,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.294,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{623, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.90','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}]
[ns_server:debug,2014-08-19T16:51:25.294,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:debug,2014-08-19T16:51:25.294,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[rebalance:info,2014-08-19T16:51:25.301,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 623 done.
Will delete it on: ['ns_1@10.242.238.88']
[ns_server:debug,2014-08-19T16:51:25.301,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 623)
[ns_server:debug,2014-08-19T16:51:25.302,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: []
[rebalance:info,2014-08-19T16:51:25.302,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 376 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}]
[rebalance:info,2014-08-19T16:51:25.302,ns_1@10.242.238.88:<0.22344.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 376 state change: {'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}
[ns_server:debug,2014-08-19T16:51:25.323,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.323,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:debug,2014-08-19T16:51:25.324,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.324,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{376, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}]
[ns_server:debug,2014-08-19T16:51:25.325,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[views:debug,2014-08-19T16:51:25.326,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/630. Updated state: dead (0)
[ns_server:debug,2014-08-19T16:51:25.326,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",630,dead,0}
[rebalance:info,2014-08-19T16:51:25.330,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 376 done.
Will delete it on: ['ns_1@10.242.238.88']
[ns_server:debug,2014-08-19T16:51:25.331,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 376)
[ns_server:debug,2014-08-19T16:51:25.332,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: []
[rebalance:info,2014-08-19T16:51:25.332,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 877 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}]
[rebalance:info,2014-08-19T16:51:25.332,ns_1@10.242.238.88:<0.22355.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 877 state change: {'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}
[ns_server:debug,2014-08-19T16:51:25.352,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.353,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.353,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.353,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:debug,2014-08-19T16:51:25.353,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{877, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}]
[rebalance:info,2014-08-19T16:51:25.363,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 877 done.
Will delete it on: ['ns_1@10.242.238.88']
[ns_server:debug,2014-08-19T16:51:25.363,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 877)
[ns_server:debug,2014-08-19T16:51:25.364,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: []
[rebalance:info,2014-08-19T16:51:25.364,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 372 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}]
[rebalance:info,2014-08-19T16:51:25.364,ns_1@10.242.238.88:<0.22366.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 372 state change: {'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}
[ns_server:debug,2014-08-19T16:51:25.386,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.386,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.387,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.387,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{372, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.89','ns_1@10.242.238.90']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}]
[ns_server:debug,2014-08-19T16:51:25.388,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[rebalance:info,2014-08-19T16:51:25.394,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 372 done. Will delete it on: ['ns_1@10.242.238.88']
[ns_server:debug,2014-08-19T16:51:25.395,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 372)
[ns_server:debug,2014-08-19T16:51:25.395,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: []
[rebalance:info,2014-08-19T16:51:25.395,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 885 state change [{'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}]
[rebalance:info,2014-08-19T16:51:25.395,ns_1@10.242.238.88:<0.22377.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 885 state change: {'ns_1@10.242.238.89',replica,undefined,'ns_1@10.242.238.91'}
[views:debug,2014-08-19T16:51:25.401,ns_1@10.242.238.88:mc_couch_events<0.18035.0>:capi_set_view_manager:handle_mc_couch_event:539]Got set_vbucket event for default/628.
Updated state: dead (0)
[ns_server:debug,2014-08-19T16:51:25.401,ns_1@10.242.238.88:<0.19210.0>:mc_connection:do_notify_vbucket_update:126]Signaled mc_couch_event: {set_vbucket,"default",628,dead,0}
[ns_server:debug,2014-08-19T16:51:25.417,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.418,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.418,ns_1@10.242.238.88:ns_config_log<0.17901.0>:ns_config_log:log_common:138]config change: buckets -> [{configs,[{"default", [{map,[{885, ['ns_1@10.242.238.88',undefined], ['ns_1@10.242.238.91','ns_1@10.242.238.89']}]}, {fastForwardMap,[]}, {uuid,<<"d95ae85dc319bab78fd23c50f6adae2e">>}, {sasl_password,"*****"}, {num_replicas,1}, {replica_index,false}, {ram_quota,13369344000}, {auth_type,sasl}, {flush_enabled,true}, {num_threads,3}, {type,membase}, {num_vbuckets,1024}, {servers,['ns_1@10.242.238.88','ns_1@10.242.238.89', 'ns_1@10.242.238.90','ns_1@10.242.238.91']}, {map_opts_hash,133465355}]}]}]
[ns_server:debug,2014-08-19T16:51:25.418,ns_1@10.242.238.88:ns_config_rep<0.17926.0>:ns_config_rep:do_push_keys:317]Replicating some config keys ([buckets]..)
[ns_server:debug,2014-08-19T16:51:25.418,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[rebalance:info,2014-08-19T16:51:25.430,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:on_move_done:300]Moving vbucket 885 done. Will delete it on: ['ns_1@10.242.238.88']
[ns_server:debug,2014-08-19T16:51:25.431,ns_1@10.242.238.88:<0.25606.0>:ns_rebalance_observer:handle_vbucket_move_done:281]Noted vbucket move end (vbucket 885)
[ns_server:debug,2014-08-19T16:51:25.431,ns_1@10.242.238.88:<0.25746.0>:ns_vbucket_mover:spawn_workers:341]Got actions: []
[rebalance:info,2014-08-19T16:51:25.431,ns_1@10.242.238.88:<0.25746.0>:janitor_agent:bulk_set_vbucket_state:485]Doing bulk vbucket 367 state change [{'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}]
[rebalance:info,2014-08-19T16:51:25.431,ns_1@10.242.238.88:<0.22388.1>:janitor_agent:set_vbucket_state:518]Doing vbucket 367 state change: {'ns_1@10.242.238.90',replica,undefined,'ns_1@10.242.238.89'}
[ns_server:debug,2014-08-19T16:51:25.457,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs
[ns_server:debug,2014-08-19T16:51:25.458,ns_1@10.242.238.88:capi_set_view_manager-default<0.19185.0>:capi_set_view_manager:handle_info:359]doing replicate_newnodes_docs